forked from s444417/ProjektAI
removed venv from repo, sleep after collecting orders
parent 23b85d2549
commit 1be0ecff48

.gitignore (vendored): 56 lines changed
@@ -1,8 +1,3 @@
-
-# Created by https://www.gitignore.io/api/python
-# Edit at https://www.gitignore.io/?templates=python
-
-### Python ###
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
@@ -25,14 +20,10 @@ parts/
 sdist/
 var/
 wheels/
-pip-wheel-metadata/
 share/python-wheels/
 *.egg-info/
 .installed.cfg
 *.egg
-.idea
-
-/idea/workspace.xml
 MANIFEST
 
 # PyInstaller
@@ -55,26 +46,31 @@ htmlcov/
 nosetests.xml
 coverage.xml
 *.cover
+*.py,cover
 .hypothesis/
 .pytest_cache/
-.idea
-.idea/*
+cover/
 
 # Translations
 *.mo
 *.pot
 
-# Scrapy stuff:
-.scrapy
-
-# Sphinx documentation
-docs/_build/
 
 # PyBuilder
+.pybuilder/
 target/
 
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
 # pyenv
-.python-version
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
 
 # pipenv
 # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
@@ -83,12 +79,25 @@ target/
 # install all needed dependencies.
 #Pipfile.lock
 
-# celery beat schedule file
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
 celerybeat-schedule
+celerybeat.pid
 
 # SageMath parsed files
 *.sage.py
 
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
 # Spyder project settings
 .spyderproject
 .spyproject
@@ -96,11 +105,6 @@ celerybeat-schedule
 # Rope project settings
 .ropeproject
 
-# Mr Developer
-.mr.developer.cfg
-.project
-.pydevproject
-
 # mkdocs documentation
 /site
 
@@ -112,4 +116,8 @@ dmypy.json
 # Pyre type checker
 .pyre/
 
-# End of https://www.gitignore.io/api/python
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/

@@ -26,18 +26,6 @@ class KitchenManager(threading.Thread):
     def pass_and_return_order(self, orders, kitchen):
         return kitchen.pass_and_return_order(orders)
 
-    def draw_orders(self, orders, kitchen):
-        if orders:
-            for i, order in enumerate(orders):
-                dishes = order[1]
-                if dishes:
-                    paths = kitchen.draw_order(dishes, self._gridboard)
-                    print("Order nr{}: paths:"
-                          "{}".format(i, paths))
-            return paths
-
-    # TODO: recognize here
-
     def draw_single_order(self, order, kitchen):
         dishes = order[1]
         if dishes:

@@ -179,6 +179,7 @@ class WaiterManager(threading.Thread):
             table = self._table_manager.get_table(waiter.get_remaining_positions()[0])
             table.setStatus(Status.Served)
             waiter.get_remaining_positions().pop(0)
+            time.sleep(2)
 
             if not waiter.get_remaining_positions() == []:
                 waiter.set_target('return_order')

@@ -1,27 +0,0 @@
-/*
-  pygame - Python Game Library
-
-  This library is free software; you can redistribute it and/or
-  modify it under the terms of the GNU Library General Public
-  License as published by the Free Software Foundation; either
-  version 2 of the License, or (at your option) any later version.
-
-  This library is distributed in the hope that it will be useful,
-  but WITHOUT ANY WARRANTY; without even the implied warranty of
-  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-  Library General Public License for more details.
-
-  You should have received a copy of the GNU Library General Public
-  License along with this library; if not, write to the Free
-  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
-*/
-
-#ifndef _CAMERA_H
-#define _CAMERA_H
-
-#include "_pygame.h"
-#include "camera.h"
-
-#endif
-

@@ -1,864 +0,0 @@
-/*
-  pygame - Python Game Library
-  Copyright (C) 2000-2001 Pete Shinners
-
-  This library is free software; you can redistribute it and/or
-  modify it under the terms of the GNU Library General Public
-  License as published by the Free Software Foundation; either
-  version 2 of the License, or (at your option) any later version.
-
-  This library is distributed in the hope that it will be useful,
-  but WITHOUT ANY WARRANTY; without even the implied warranty of
-  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-  Library General Public License for more details.
-
-  You should have received a copy of the GNU Library General Public
-  License along with this library; if not, write to the Free
-  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
-  Pete Shinners
-  pete@shinners.org
-*/
-
-#ifndef _PYGAME_H
-#define _PYGAME_H
-
-/** This header file includes all the definitions for the
- ** base pygame extensions. This header only requires
- ** SDL and Python includes. The reason for functions
- ** prototyped with #define's is to allow for maximum
- ** python portability. It also uses python as the
- ** runtime linker, which allows for late binding. For more
- ** information on this style of development, read the Python
- ** docs on this subject.
- ** http://www.python.org/doc/current/ext/using-cobjects.html
- **
- ** If using this to build your own derived extensions,
- ** you'll see that the functions available here are mainly
- ** used to help convert between python objects and SDL objects.
- ** Since this library doesn't add a lot of functionality to
- ** the SDL libarary, it doesn't need to offer a lot either.
- **
- ** When initializing your extension module, you must manually
- ** import the modules you want to use. (this is the part about
- ** using python as the runtime linker). Each module has its
- ** own import_xxx() routine. You need to perform this import
- ** after you have initialized your own module, and before
- ** you call any routines from that module. Since every module
- ** in pygame does this, there are plenty of examples.
- **
- ** The base module does include some useful conversion routines
- ** that you are free to use in your own extension.
- **
- ** When making changes, it is very important to keep the
- ** FIRSTSLOT and NUMSLOT constants up to date for each
- ** section. Also be sure not to overlap any of the slots.
- ** When you do make a mistake with this, it will result
- ** is a dereferenced NULL pointer that is easier to diagnose
- ** than it could be :]
- **/
-#if defined(HAVE_SNPRINTF) /* defined in python.h (pyerrors.h) and SDL.h \
-                              (SDL_config.h) */
-#undef HAVE_SNPRINTF /* remove GCC redefine warning */
-#endif
-
-// This must be before all else
-#if defined(__SYMBIAN32__) && defined(OPENC)
-#include <sys/types.h>
-
-#if defined(__WINS__)
-void *
-_alloca(size_t size);
-#define alloca _alloca
-#endif
-#endif
-
-#define PG_STRINGIZE_HELPER(x) #x
-#define PG_STRINGIZE(x) PG_STRINGIZE_HELPER(x)
-#define PG_WARN(desc) message(__FILE__ "(" PG_STRINGIZE(__LINE__) "): WARNING: " #desc)
-
-/* This is unconditionally defined in Python.h */
-#if defined(_POSIX_C_SOURCE)
-#undef _POSIX_C_SOURCE
-#endif
-
-#include <Python.h>
-
-/* the version macros are defined since version 1.9.5 */
-#define PG_MAJOR_VERSION 1
-#define PG_MINOR_VERSION 9
-#define PG_PATCH_VERSION 6
-#define PG_VERSIONNUM(MAJOR, MINOR, PATCH) (1000*(MAJOR) + 100*(MINOR) + (PATCH))
-#define PG_VERSION_ATLEAST(MAJOR, MINOR, PATCH) \
-    (PG_VERSIONNUM(PG_MAJOR_VERSION, PG_MINOR_VERSION, PG_PATCH_VERSION) >= \
-     PG_VERSIONNUM(MAJOR, MINOR, PATCH))
-
-/* Cobjects vanish in Python 3.2; so we will code as though we use capsules */
-#if defined(Py_CAPSULE_H)
-#define PG_HAVE_CAPSULE 1
-#else
-#define PG_HAVE_CAPSULE 0
-#endif
-#if defined(Py_COBJECT_H)
-#define PG_HAVE_COBJECT 1
-#else
-#define PG_HAVE_COBJECT 0
-#endif
-#if !PG_HAVE_CAPSULE
-#define PyCapsule_New(ptr, n, dfn) PyCObject_FromVoidPtr(ptr, dfn)
-#define PyCapsule_GetPointer(obj, n) PyCObject_AsVoidPtr(obj)
-#define PyCapsule_CheckExact(obj) PyCObject_Check(obj)
-#endif
-
-/* Pygame uses Py_buffer (PEP 3118) to exchange array information internally;
- * define here as needed.
- */
-#if !defined(PyBUF_SIMPLE)
-typedef struct bufferinfo {
-    void *buf;
-    PyObject *obj;
-    Py_ssize_t len;
-    Py_ssize_t itemsize;
-    int readonly;
-    int ndim;
-    char *format;
-    Py_ssize_t *shape;
-    Py_ssize_t *strides;
-    Py_ssize_t *suboffsets;
-    void *internal;
-} Py_buffer;
-
-/* Flags for getting buffers */
-#define PyBUF_SIMPLE 0
-#define PyBUF_WRITABLE 0x0001
-/* we used to include an E, backwards compatible alias */
-#define PyBUF_WRITEABLE PyBUF_WRITABLE
-#define PyBUF_FORMAT 0x0004
-#define PyBUF_ND 0x0008
-#define PyBUF_STRIDES (0x0010 | PyBUF_ND)
-#define PyBUF_C_CONTIGUOUS (0x0020 | PyBUF_STRIDES)
-#define PyBUF_F_CONTIGUOUS (0x0040 | PyBUF_STRIDES)
-#define PyBUF_ANY_CONTIGUOUS (0x0080 | PyBUF_STRIDES)
-#define PyBUF_INDIRECT (0x0100 | PyBUF_STRIDES)
-
-#define PyBUF_CONTIG (PyBUF_ND | PyBUF_WRITABLE)
-#define PyBUF_CONTIG_RO (PyBUF_ND)
-
-#define PyBUF_STRIDED (PyBUF_STRIDES | PyBUF_WRITABLE)
-#define PyBUF_STRIDED_RO (PyBUF_STRIDES)
-
-#define PyBUF_RECORDS (PyBUF_STRIDES | PyBUF_WRITABLE | PyBUF_FORMAT)
-#define PyBUF_RECORDS_RO (PyBUF_STRIDES | PyBUF_FORMAT)
-
-#define PyBUF_FULL (PyBUF_INDIRECT | PyBUF_WRITABLE | PyBUF_FORMAT)
-#define PyBUF_FULL_RO (PyBUF_INDIRECT | PyBUF_FORMAT)
-
-#define PyBUF_READ 0x100
-#define PyBUF_WRITE 0x200
-#define PyBUF_SHADOW 0x400
-
-typedef int (*getbufferproc)(PyObject *, Py_buffer *, int);
-typedef void (*releasebufferproc)(Py_buffer *);
-#endif /* #if !defined(PyBUF_SIMPLE) */
-
-/* Flag indicating a pg_buffer; used for assertions within callbacks */
-#ifndef NDEBUG
-#define PyBUF_PYGAME 0x4000
-#endif
-
-#define PyBUF_HAS_FLAG(f, F) (((f) & (F)) == (F))
-
-/* Array information exchange struct C type; inherits from Py_buffer
- *
- * Pygame uses its own Py_buffer derived C struct as an internal representation
- * of an imported array buffer. The extended Py_buffer allows for a
- * per-instance release callback,
- */
-typedef void (*pybuffer_releaseproc)(Py_buffer *);
-
-typedef struct pg_bufferinfo_s {
-    Py_buffer view;
-    PyObject *consumer; /* Input: Borrowed reference */
-    pybuffer_releaseproc release_buffer;
-} pg_buffer;
-
-/* Operating system specific adjustments
- */
-// No signal()
-#if defined(__SYMBIAN32__) && defined(HAVE_SIGNAL_H)
-#undef HAVE_SIGNAL_H
-#endif
-
-#if defined(HAVE_SNPRINTF)
-#undef HAVE_SNPRINTF
-#endif
-
-#ifdef MS_WIN32 /*Python gives us MS_WIN32, SDL needs just WIN32*/
-#ifndef WIN32
-#define WIN32
-#endif
-#endif
-
-/// Prefix when initializing module
-#define MODPREFIX ""
-/// Prefix when importing module
-#define IMPPREFIX "pygame."
-
-#ifdef __SYMBIAN32__
-#undef MODPREFIX
-#undef IMPPREFIX
-// On Symbian there is no pygame package. The extensions are built-in or in
-// sys\bin.
-#define MODPREFIX "pygame_"
-#define IMPPREFIX "pygame_"
-#endif
-
-#include <SDL.h>
-
-/* Pygame's SDL version macros:
- *   IS_SDLv1 is 1 if SDL 1.x.x, 0 otherwise
- *   IS_SDLv2 is 1 if at least SDL 2.0.0, 0 otherwise
- */
-#if (SDL_VERSION_ATLEAST(2, 0, 0))
-#define IS_SDLv1 0
-#define IS_SDLv2 1
-#else
-#define IS_SDLv1 1
-#define IS_SDLv2 0
-#endif
-
-/*#if IS_SDLv1 && PG_MAJOR_VERSION >= 2
-#error pygame 2 requires SDL 2
-#endif*/
-
-#if IS_SDLv2
-/* SDL 1.2 constants removed from SDL 2 */
-typedef enum {
-    SDL_HWSURFACE = 0,
-    SDL_RESIZABLE = SDL_WINDOW_RESIZABLE,
-    SDL_ASYNCBLIT = 0,
-    SDL_OPENGL = SDL_WINDOW_OPENGL,
-    SDL_OPENGLBLIT = 0,
-    SDL_ANYFORMAT = 0,
-    SDL_HWPALETTE = 0,
-    SDL_DOUBLEBUF = 0,
-    SDL_FULLSCREEN = SDL_WINDOW_FULLSCREEN,
-    SDL_HWACCEL = 0,
-    SDL_SRCCOLORKEY = 0,
-    SDL_RLEACCELOK = 0,
-    SDL_SRCALPHA = 0,
-    SDL_NOFRAME = SDL_WINDOW_BORDERLESS,
-    SDL_GL_SWAP_CONTROL = 0,
-    TIMER_RESOLUTION = 0
-} PygameVideoFlags;
-
-/* the wheel button constants were removed from SDL 2 */
-typedef enum {
-    PGM_BUTTON_LEFT = SDL_BUTTON_LEFT,
-    PGM_BUTTON_RIGHT = SDL_BUTTON_RIGHT,
-    PGM_BUTTON_MIDDLE = SDL_BUTTON_MIDDLE,
-    PGM_BUTTON_WHEELUP = 4,
-    PGM_BUTTON_WHEELDOWN = 5,
-    PGM_BUTTON_X1 = SDL_BUTTON_X1 + 2,
-    PGM_BUTTON_X2 = SDL_BUTTON_X2 + 2,
-    PGM_BUTTON_KEEP = 0x80
-} PygameMouseFlags;
-
-typedef enum {
-    SDL_NOEVENT = 0,
-    /* SDL 1.2 allowed for 8 user defined events. */
-    SDL_NUMEVENTS = SDL_USEREVENT + 8,
-    SDL_ACTIVEEVENT = SDL_NUMEVENTS,
-    PGE_EVENTBEGIN = SDL_NUMEVENTS,
-    SDL_VIDEORESIZE,
-    SDL_VIDEOEXPOSE,
-    PGE_KEYREPEAT,
-    PGE_EVENTEND
-} PygameEventCode;
-
-#define PGE_NUMEVENTS (PGE_EVENTEND - PGE_EVENTBEGIN)
-
-typedef enum {
-    SDL_APPFOCUSMOUSE,
-    SDL_APPINPUTFOCUS,
-    SDL_APPACTIVE
-} PygameAppCode;
-
-/* Surface flags: based on SDL 1.2 flags */
-typedef enum {
-    PGS_SWSURFACE = 0x00000000,
-    PGS_HWSURFACE = 0x00000001,
-    PGS_ASYNCBLIT = 0x00000004,
-
-    PGS_ANYFORMAT = 0x10000000,
-    PGS_HWPALETTE = 0x20000000,
-    PGS_DOUBLEBUF = 0x40000000,
-    PGS_FULLSCREEN = 0x80000000,
-    PGS_OPENGL = 0x00000002,
-    PGS_OPENGLBLIT = 0x0000000A,
-    PGS_RESIZABLE = 0x00000010,
-    PGS_NOFRAME = 0x00000020,
-    PGS_SHOWN = 0x00000040,  /* Added from SDL 2 */
-    PGS_HIDDEN = 0x00000080, /* Added from SDL 2 */
-
-    PGS_HWACCEL = 0x00000100,
-    PGS_SRCCOLORKEY = 0x00001000,
-    PGS_RLEACCELOK = 0x00002000,
-    PGS_RLEACCEL = 0x00004000,
-    PGS_SRCALPHA = 0x00010000,
-    PGS_PREALLOC = 0x01000000
-} PygameSurfaceFlags;
-
-typedef struct {
-    Uint32 hw_available:1;
-    Uint32 wm_available:1;
-    Uint32 blit_hw:1;
-    Uint32 blit_hw_CC:1;
-    Uint32 blit_hw_A:1;
-    Uint32 blit_sw:1;
-    Uint32 blit_sw_CC:1;
-    Uint32 blit_sw_A:1;
-    Uint32 blit_fill:1;
-    Uint32 video_mem;
-    SDL_PixelFormat *vfmt;
-    SDL_PixelFormat vfmt_data;
-    int current_w;
-    int current_h;
-} pg_VideoInfo;
-
-#endif /* IS_SDLv2 */
-/* macros used throughout the source */
-#define RAISE(x, y) (PyErr_SetString((x), (y)), (PyObject *)NULL)
-
-#ifdef WITH_THREAD
-#define PG_CHECK_THREADS() (1)
-#else /* ~WITH_THREAD */
-#define PG_CHECK_THREADS() \
-    (RAISE(PyExc_NotImplementedError, \
-           "Python built without thread support"))
-#endif /* ~WITH_THREAD */
-
-#define PyType_Init(x) (((x).ob_type) = &PyType_Type)
-#define PYGAMEAPI_LOCAL_ENTRY "_PYGAME_C_API"
-
-#ifndef MIN
-#define MIN(a, b) ((a) < (b) ? (a) : (b))
-#endif
-
-#ifndef MAX
-#define MAX(a, b) ((a) > (b) ? (a) : (b))
-#endif
-
-#ifndef ABS
-#define ABS(a) (((a) < 0) ? -(a) : (a))
-#endif
-
-/* test sdl initializations */
-#define VIDEO_INIT_CHECK() \
-    if (!SDL_WasInit(SDL_INIT_VIDEO)) \
-        return RAISE(pgExc_SDLError, "video system not initialized")
-
-#define CDROM_INIT_CHECK() \
-    if (!SDL_WasInit(SDL_INIT_CDROM)) \
-        return RAISE(pgExc_SDLError, "cdrom system not initialized")
-
-#define JOYSTICK_INIT_CHECK() \
-    if (!SDL_WasInit(SDL_INIT_JOYSTICK)) \
-        return RAISE(pgExc_SDLError, "joystick system not initialized")
-
-/* BASE */
-#define VIEW_CONTIGUOUS 1
-#define VIEW_C_ORDER 2
-#define VIEW_F_ORDER 4
-
-#define PYGAMEAPI_BASE_FIRSTSLOT 0
-#if IS_SDLv1
-#define PYGAMEAPI_BASE_NUMSLOTS 19
-#else /* IS_SDLv2 */
-#define PYGAMEAPI_BASE_NUMSLOTS 23
-#endif /* IS_SDLv2 */
-#ifndef PYGAMEAPI_BASE_INTERNAL
-#define pgExc_SDLError ((PyObject *)PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT])
-
-#define pg_RegisterQuit \
-    (*(void (*)(void (*)(void)))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 1])
-
-#define pg_IntFromObj \
-    (*(int (*)(PyObject *, int *))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 2])
-
-#define pg_IntFromObjIndex \
-    (*(int (*)(PyObject *, int, \
-               int *))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 3])
-
-#define pg_TwoIntsFromObj \
-    (*(int (*)(PyObject *, int *, \
-               int *))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 4])
-
-#define pg_FloatFromObj \
-    (*(int (*)(PyObject *, float *))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 5])
-
-#define pg_FloatFromObjIndex \
-    (*(int (*)(PyObject *, int, \
-               float *))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 6])
-
-#define pg_TwoFloatsFromObj \
-    (*(int (*)(PyObject *, float *, \
-               float *))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 7])
-
-#define pg_UintFromObj \
-    (*(int (*)(PyObject *, \
-               Uint32 *))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 8])
-
-#define pg_UintFromObjIndex \
-    (*(int (*)(PyObject *, int, \
-               Uint32 *))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 9])
-
-#define pgVideo_AutoQuit \
-    (*(void (*)(void))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 10])
-
-#define pgVideo_AutoInit \
-    (*(int (*)(void))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 11])
-
-#define pg_RGBAFromObj \
-    (*(int (*)(PyObject *, \
-               Uint8 *))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 12])
-
-#define pgBuffer_AsArrayInterface \
-    (*(PyObject * (*)(Py_buffer *)) \
-         PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 13])
-
-#define pgBuffer_AsArrayStruct \
-    (*(PyObject * (*)(Py_buffer *)) \
-         PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 14])
-
-#define pgObject_GetBuffer \
-    (*(int (*)(PyObject *, pg_buffer *, \
-               int))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 15])
-
-#define pgBuffer_Release \
-    (*(void (*)(pg_buffer *))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 16])
-
-#define pgDict_AsBuffer \
-    (*(int (*)(pg_buffer *, PyObject *, \
-               int))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 17])
-
-#define pgExc_BufferError \
-    ((PyObject *)PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 18])
-
-#if IS_SDLv2
-#define pg_GetDefaultWindow \
-    (*(SDL_Window * (*)(void)) PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 19])
-
-#define pg_SetDefaultWindow \
-    (*(void (*)(SDL_Window *))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 20])
-
-#define pg_GetDefaultWindowSurface \
-    (*(PyObject * (*)(void)) PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 21])
-
-#define pg_SetDefaultWindowSurface \
-    (*(void (*)(PyObject *))PyGAME_C_API[PYGAMEAPI_BASE_FIRSTSLOT + 22])
-
-#endif /* IS_SDLv2 */
-
-#define import_pygame_base() IMPORT_PYGAME_MODULE(base, BASE)
-#endif
-
-/* RECT */
-#define PYGAMEAPI_RECT_FIRSTSLOT \
-    (PYGAMEAPI_BASE_FIRSTSLOT + PYGAMEAPI_BASE_NUMSLOTS)
-#define PYGAMEAPI_RECT_NUMSLOTS 4
-
-#if IS_SDLv1
-typedef struct {
-    int x, y;
-    int w, h;
-} GAME_Rect;
-#else
-typedef SDL_Rect GAME_Rect;
-#endif
-
-typedef struct {
-    PyObject_HEAD GAME_Rect r;
-    PyObject *weakreflist;
-} pgRectObject;
-
-#define pgRect_AsRect(x) (((pgRectObject *)x)->r)
-#ifndef PYGAMEAPI_RECT_INTERNAL
-#define pgRect_Check(x) \
-    ((x)->ob_type == \
-     (PyTypeObject *)PyGAME_C_API[PYGAMEAPI_RECT_FIRSTSLOT + 0])
-#define pgRect_Type \
-    (*(PyTypeObject *)PyGAME_C_API[PYGAMEAPI_RECT_FIRSTSLOT + 0])
-#define pgRect_New \
-    (*(PyObject * (*)(SDL_Rect *)) PyGAME_C_API[PYGAMEAPI_RECT_FIRSTSLOT + 1])
-#define pgRect_New4 \
-    (*(PyObject * (*)(int, int, int, int)) \
-         PyGAME_C_API[PYGAMEAPI_RECT_FIRSTSLOT + 2])
-#define pgRect_FromObject \
-    (*(GAME_Rect * (*)(PyObject *, GAME_Rect *)) \
-         PyGAME_C_API[PYGAMEAPI_RECT_FIRSTSLOT + 3])
-
-#define import_pygame_rect() IMPORT_PYGAME_MODULE(rect, RECT)
-#endif
-
-/* CDROM */
-#define PYGAMEAPI_CDROM_FIRSTSLOT \
-    (PYGAMEAPI_RECT_FIRSTSLOT + PYGAMEAPI_RECT_NUMSLOTS)
-#define PYGAMEAPI_CDROM_NUMSLOTS 2
-
-typedef struct {
-    PyObject_HEAD int id;
-} pgCDObject;
-
-#define pgCD_AsID(x) (((pgCDObject *)x)->id)
-#ifndef PYGAMEAPI_CDROM_INTERNAL
-#define pgCD_Check(x) \
-    ((x)->ob_type == \
-     (PyTypeObject *)PyGAME_C_API[PYGAMEAPI_CDROM_FIRSTSLOT + 0])
-#define pgCD_Type \
-    (*(PyTypeObject *)PyGAME_C_API[PYGAMEAPI_CDROM_FIRSTSLOT + 0])
-#define pgCD_New \
-    (*(PyObject * (*)(int)) PyGAME_C_API[PYGAMEAPI_CDROM_FIRSTSLOT + 1])
-
-#define import_pygame_cd() IMPORT_PYGAME_MODULE(cdrom, CDROM)
-#endif
-
-/* JOYSTICK */
-#define PYGAMEAPI_JOYSTICK_FIRSTSLOT \
-    (PYGAMEAPI_CDROM_FIRSTSLOT + PYGAMEAPI_CDROM_NUMSLOTS)
-#define PYGAMEAPI_JOYSTICK_NUMSLOTS 2
-
-typedef struct {
-    PyObject_HEAD int id;
-} pgJoystickObject;
-
-#define pgJoystick_AsID(x) (((pgJoystickObject *)x)->id)
-
-#ifndef PYGAMEAPI_JOYSTICK_INTERNAL
-#define pgJoystick_Check(x) \
-    ((x)->ob_type == \
-     (PyTypeObject *)PyGAME_C_API[PYGAMEAPI_JOYSTICK_FIRSTSLOT + 0])
-
-#define pgJoystick_Type \
-    (*(PyTypeObject *)PyGAME_C_API[PYGAMEAPI_JOYSTICK_FIRSTSLOT + 0])
-#define pgJoystick_New \
-    (*(PyObject * (*)(int)) PyGAME_C_API[PYGAMEAPI_JOYSTICK_FIRSTSLOT + 1])
-
-#define import_pygame_joystick() IMPORT_PYGAME_MODULE(joystick, JOYSTICK)
-#endif
-
-/* DISPLAY */
-#define PYGAMEAPI_DISPLAY_FIRSTSLOT \
-    (PYGAMEAPI_JOYSTICK_FIRSTSLOT + PYGAMEAPI_JOYSTICK_NUMSLOTS)
-#define PYGAMEAPI_DISPLAY_NUMSLOTS 2
-
-typedef struct {
-#if IS_SDLv1
-    PyObject_HEAD SDL_VideoInfo info;
-#else
-    PyObject_HEAD pg_VideoInfo info;
-#endif
-} pgVidInfoObject;
-
-#define pgVidInfo_AsVidInfo(x) (((pgVidInfoObject *)x)->info)
-#ifndef PYGAMEAPI_DISPLAY_INTERNAL
-#define pgVidInfo_Check(x) \
-    ((x)->ob_type == \
-     (PyTypeObject *)PyGAME_C_API[PYGAMEAPI_DISPLAY_FIRSTSLOT + 0])
-
-#define pgVidInfo_Type \
-    (*(PyTypeObject *)PyGAME_C_API[PYGAMEAPI_DISPLAY_FIRSTSLOT + 0])
-
-#if IS_SDLv1
-#define pgVidInfo_New \
-    (*(PyObject * (*)(SDL_VideoInfo *)) \
-         PyGAME_C_API[PYGAMEAPI_DISPLAY_FIRSTSLOT + 1])
-#else
-#define pgVidInfo_New \
-    (*(PyObject * (*)(pg_VideoInfo *)) \
-         PyGAME_C_API[PYGAMEAPI_DISPLAY_FIRSTSLOT + 1])
-#endif
-
-#define import_pygame_display() IMPORT_PYGAME_MODULE(display, DISPLAY)
-#endif
-
-/* SURFACE */
-#define PYGAMEAPI_SURFACE_FIRSTSLOT \
-    (PYGAMEAPI_DISPLAY_FIRSTSLOT + PYGAMEAPI_DISPLAY_NUMSLOTS)
-#define PYGAMEAPI_SURFACE_NUMSLOTS 3
-typedef struct {
-    PyObject_HEAD SDL_Surface *surf;
-#if IS_SDLv2
-    int owner;
-#endif /* IS_SDLv2 */
-    struct pgSubSurface_Data *subsurface; /*ptr to subsurface data (if a
-                                           * subsurface)*/
-    PyObject *weakreflist;
-    PyObject *locklist;
-    PyObject *dependency;
-} pgSurfaceObject;
-#define pgSurface_AsSurface(x) (((pgSurfaceObject *)x)->surf)
-#ifndef PYGAMEAPI_SURFACE_INTERNAL
-#define pgSurface_Check(x) \
-    (PyObject_IsInstance((x), \
-         (PyObject *)PyGAME_C_API[PYGAMEAPI_SURFACE_FIRSTSLOT + 0]))
-#define pgSurface_Type \
-    (*(PyTypeObject *)PyGAME_C_API[PYGAMEAPI_SURFACE_FIRSTSLOT + 0])
-#if IS_SDLv1
-#define pgSurface_New \
-    (*(PyObject * (*)(SDL_Surface *)) \
-         PyGAME_C_API[PYGAMEAPI_SURFACE_FIRSTSLOT + 1])
-#else /* IS_SDLv2 */
-#define pgSurface_New2 \
-    (*(PyObject * (*)(SDL_Surface *, int)) \
-         PyGAME_C_API[PYGAMEAPI_SURFACE_FIRSTSLOT + 1])
-#endif /* IS_SDLv2 */
-#define pgSurface_Blit \
-    (*(int (*)(PyObject *, PyObject *, SDL_Rect *, SDL_Rect *, \
-               int))PyGAME_C_API[PYGAMEAPI_SURFACE_FIRSTSLOT + 2])
-
-#define import_pygame_surface() \
-    do { \
-        IMPORT_PYGAME_MODULE(surface, SURFACE); \
-        if (PyErr_Occurred() != NULL) \
-            break; \
-        IMPORT_PYGAME_MODULE(surflock, SURFLOCK); \
-    } while (0)
-
-#if IS_SDLv2
-#define pgSurface_New(surface) pgSurface_New2((surface), 1)
-#define pgSurface_NewNoOwn(surface) pgSurface_New2((surface), 0)
-#endif /* IS_SDLv2 */
-
-#endif
-
-/* SURFLOCK */ /*auto import/init by surface*/
-#define PYGAMEAPI_SURFLOCK_FIRSTSLOT \
-    (PYGAMEAPI_SURFACE_FIRSTSLOT + PYGAMEAPI_SURFACE_NUMSLOTS)
-#define PYGAMEAPI_SURFLOCK_NUMSLOTS 8
-struct pgSubSurface_Data {
-    PyObject *owner;
-    int pixeloffset;
-    int offsetx, offsety;
-};
-
-typedef struct {
-    PyObject_HEAD PyObject *surface;
-    PyObject *lockobj;
-    PyObject *weakrefs;
-} pgLifetimeLockObject;
-
-#ifndef PYGAMEAPI_SURFLOCK_INTERNAL
-#define pgLifetimeLock_Check(x) \
-    ((x)->ob_type == \
-     (PyTypeObject *)PyGAME_C_API[PYGAMEAPI_SURFLOCK_FIRSTSLOT + 0])
-#define pgSurface_Prep(x) \
-    if (((pgSurfaceObject *)x)->subsurface) \
-        (*(*(void (*)( \
-            PyObject *))PyGAME_C_API[PYGAMEAPI_SURFLOCK_FIRSTSLOT + 1]))(x)
-
-#define pgSurface_Unprep(x) \
-    if (((pgSurfaceObject *)x)->subsurface) \
-        (*(*(void (*)( \
-            PyObject *))PyGAME_C_API[PYGAMEAPI_SURFLOCK_FIRSTSLOT + 2]))(x)
-
-#define pgSurface_Lock \
-    (*(int (*)(PyObject *))PyGAME_C_API[PYGAMEAPI_SURFLOCK_FIRSTSLOT + 3])
-#define pgSurface_Unlock \
-    (*(int (*)(PyObject *))PyGAME_C_API[PYGAMEAPI_SURFLOCK_FIRSTSLOT + 4])
-#define pgSurface_LockBy \
-    (*(int (*)(PyObject *, \
-               PyObject *))PyGAME_C_API[PYGAMEAPI_SURFLOCK_FIRSTSLOT + 5])
-#define pgSurface_UnlockBy \
-    (*(int (*)(PyObject *, \
-               PyObject *))PyGAME_C_API[PYGAMEAPI_SURFLOCK_FIRSTSLOT + 6])
-#define pgSurface_LockLifetime \
-    (*(PyObject * (*)(PyObject *, PyObject *)) \
-         PyGAME_C_API[PYGAMEAPI_SURFLOCK_FIRSTSLOT + 7])
-#endif
-
-/* EVENT */
-#define PYGAMEAPI_EVENT_FIRSTSLOT \
-    (PYGAMEAPI_SURFLOCK_FIRSTSLOT + PYGAMEAPI_SURFLOCK_NUMSLOTS)
-#if IS_SDLv1
-#define PYGAMEAPI_EVENT_NUMSLOTS 4
-#else /* IS_SDLv2 */
-#define PYGAMEAPI_EVENT_NUMSLOTS 6
-#endif /* IS_SDLv2 */
-
-typedef struct {
-    PyObject_HEAD int type;
-    PyObject *dict;
-} pgEventObject;
-
-#ifndef PYGAMEAPI_EVENT_INTERNAL
-#define pgEvent_Check(x) \
-    ((x)->ob_type == \
-     (PyTypeObject *)PyGAME_C_API[PYGAMEAPI_EVENT_FIRSTSLOT + 0])
-#define pgEvent_Type \
-    (*(PyTypeObject *)PyGAME_C_API[PYGAMEAPI_EVENT_FIRSTSLOT + 0])
-#define pgEvent_New \
-    (*(PyObject * (*)(SDL_Event *)) \
-         PyGAME_C_API[PYGAMEAPI_EVENT_FIRSTSLOT + 1])
-#define pgEvent_New2 \
-    (*(PyObject * (*)(int, PyObject *)) \
-         PyGAME_C_API[PYGAMEAPI_EVENT_FIRSTSLOT + 2])
-#define pgEvent_FillUserEvent \
-    (*(int (*)(pgEventObject *, \
-               SDL_Event *))PyGAME_C_API[PYGAMEAPI_EVENT_FIRSTSLOT + 3])
-#if IS_SDLv2
-#define pg_EnableKeyRepeat \
-    (*(int (*)(int, int))PyGAME_C_API[PYGAMEAPI_EVENT_FIRSTSLOT + 4])
-#define pg_GetKeyRepeat \
-    (*(void (*)(int *, int *))PyGAME_C_API[PYGAMEAPI_EVENT_FIRSTSLOT + 5])
-#endif /* IS_SDLv2 */
-#define import_pygame_event() IMPORT_PYGAME_MODULE(event, EVENT)
-#endif
-
-/* RWOBJECT */
-/*the rwobject are only needed for C side work, not accessable from python*/
-#define PYGAMEAPI_RWOBJECT_FIRSTSLOT \
-    (PYGAMEAPI_EVENT_FIRSTSLOT + PYGAMEAPI_EVENT_NUMSLOTS)
-#define PYGAMEAPI_RWOBJECT_NUMSLOTS 6
-#ifndef PYGAMEAPI_RWOBJECT_INTERNAL
-#define pgRWops_FromObject \
-    (*(SDL_RWops * (*)(PyObject *)) \
-         PyGAME_C_API[PYGAMEAPI_RWOBJECT_FIRSTSLOT + 0])
-#define pgRWops_IsFileObject \
-    (*(int (*)(SDL_RWops *))PyGAME_C_API[PYGAMEAPI_RWOBJECT_FIRSTSLOT + 1])
-#define pg_EncodeFilePath \
-    (*(PyObject * (*)(PyObject *, PyObject *)) \
-         PyGAME_C_API[PYGAMEAPI_RWOBJECT_FIRSTSLOT + 2])
-#define pg_EncodeString \
-    (*(PyObject * (*)(PyObject *, const char *, const char *, PyObject *)) \
-         PyGAME_C_API[PYGAMEAPI_RWOBJECT_FIRSTSLOT + 3])
-#define pgRWops_FromFileObject \
-    (*(SDL_RWops * (*)(PyObject *)) \
-         PyGAME_C_API[PYGAMEAPI_RWOBJECT_FIRSTSLOT + 4])
-#define pgRWops_ReleaseObject \
-    (*(int (*)(SDL_RWops *)) \
-         PyGAME_C_API[PYGAMEAPI_RWOBJECT_FIRSTSLOT + 5])
-#define import_pygame_rwobject() IMPORT_PYGAME_MODULE(rwobject, RWOBJECT)
-
-#endif
-
-/* PixelArray */
-#define PYGAMEAPI_PIXELARRAY_FIRSTSLOT \
-    (PYGAMEAPI_RWOBJECT_FIRSTSLOT + PYGAMEAPI_RWOBJECT_NUMSLOTS)
-#define PYGAMEAPI_PIXELARRAY_NUMSLOTS 2
-#ifndef PYGAMEAPI_PIXELARRAY_INTERNAL
-#define PyPixelArray_Check(x) \
-    ((x)->ob_type == \
-     (PyTypeObject *)PyGAME_C_API[PYGAMEAPI_PIXELARRAY_FIRSTSLOT + 0])
-#define PyPixelArray_New \
-    (*(PyObject * (*)) PyGAME_C_API[PYGAMEAPI_PIXELARRAY_FIRSTSLOT + 1])
-#define import_pygame_pixelarray() IMPORT_PYGAME_MODULE(pixelarray, PIXELARRAY)
-#endif /* PYGAMEAPI_PIXELARRAY_INTERNAL */
-
-/* Color */
-#define PYGAMEAPI_COLOR_FIRSTSLOT \
-    (PYGAMEAPI_PIXELARRAY_FIRSTSLOT + PYGAMEAPI_PIXELARRAY_NUMSLOTS)
-#define PYGAMEAPI_COLOR_NUMSLOTS 4
-#ifndef PYGAMEAPI_COLOR_INTERNAL
-#define pgColor_Check(x) \
-    ((x)->ob_type == \
-     (PyTypeObject *)PyGAME_C_API[PYGAMEAPI_COLOR_FIRSTSLOT + 0])
-#define pgColor_Type (*(PyObject *)PyGAME_C_API[PYGAMEAPI_COLOR_FIRSTSLOT])
-#define pgColor_New \
-    (*(PyObject * (*)(Uint8 *)) PyGAME_C_API[PYGAMEAPI_COLOR_FIRSTSLOT + 1])
-#define pgColor_NewLength \
-    (*(PyObject * (*)(Uint8 *, Uint8)) \
-         PyGAME_C_API[PYGAMEAPI_COLOR_FIRSTSLOT + 3])
-
-#define pg_RGBAFromColorObj \
-    (*(int (*)(PyObject *, \
-               Uint8 *))PyGAME_C_API[PYGAMEAPI_COLOR_FIRSTSLOT + 2])
-#define import_pygame_color() IMPORT_PYGAME_MODULE(color, COLOR)
-#endif /* PYGAMEAPI_COLOR_INTERNAL */
-
-/* Math */
-#define PYGAMEAPI_MATH_FIRSTSLOT \
-    (PYGAMEAPI_COLOR_FIRSTSLOT + PYGAMEAPI_COLOR_NUMSLOTS)
-#define PYGAMEAPI_MATH_NUMSLOTS 2
-#ifndef PYGAMEAPI_MATH_INTERNAL
-#define pgVector2_Check(x) \
-    ((x)->ob_type == \
-     (PyTypeObject *)PyGAME_C_API[PYGAMEAPI_MATH_FIRSTSLOT + 0])
-#define pgVector3_Check(x) \
-    ((x)->ob_type == \
-     (PyTypeObject *)PyGAME_C_API[PYGAMEAPI_MATH_FIRSTSLOT + 1])
-/*
-#define pgVector2_New \
-    (*(PyObject*(*)) PyGAME_C_API[PYGAMEAPI_MATH_FIRSTSLOT + 1])
-*/
-#define import_pygame_math() IMPORT_PYGAME_MODULE(math, MATH)
-#endif /* PYGAMEAPI_MATH_INTERNAL */
-
-#define PG_CAPSULE_NAME(m) (IMPPREFIX m "." PYGAMEAPI_LOCAL_ENTRY)
-
-#define _IMPORT_PYGAME_MODULE(module, MODULE, api_root) \
-    { \
-        PyObject *_module = PyImport_ImportModule(IMPPREFIX #module); \
-        \
-        if (_module != NULL) { \
-            PyObject *_c_api = \
-                PyObject_GetAttrString(_module, PYGAMEAPI_LOCAL_ENTRY); \
-            \
-            Py_DECREF(_module); \
-            if (_c_api != NULL && PyCapsule_CheckExact(_c_api)) { \
-                void **localptr = (void **)PyCapsule_GetPointer( \
-                    _c_api, PG_CAPSULE_NAME(#module)); \
-                \
-                if (localptr != NULL) { \
-                    memcpy(api_root + PYGAMEAPI_##MODULE##_FIRSTSLOT, \
-                           localptr, \
-                           sizeof(void **) * PYGAMEAPI_##MODULE##_NUMSLOTS); \
-                } \
-            } \
-            Py_XDECREF(_c_api); \
-        } \
-    }
-
-#ifndef NO_PYGAME_C_API
-#define IMPORT_PYGAME_MODULE(module, MODULE) \
-    _IMPORT_PYGAME_MODULE(module, MODULE, PyGAME_C_API)
-#define PYGAMEAPI_TOTALSLOTS \
-    (PYGAMEAPI_MATH_FIRSTSLOT + PYGAMEAPI_MATH_NUMSLOTS)
-
-#ifdef PYGAME_H
-void *PyGAME_C_API[PYGAMEAPI_TOTALSLOTS] = {NULL};
-#else
-extern void *PyGAME_C_API[PYGAMEAPI_TOTALSLOTS];
-#endif
-#endif
-
-#if PG_HAVE_CAPSULE
-#define encapsulate_api(ptr, module) \
-    PyCapsule_New(ptr, PG_CAPSULE_NAME(module), NULL)
-#else
-#define encapsulate_api(ptr, module) PyCObject_FromVoidPtr(ptr, NULL)
-#endif
-
-#ifndef PG_INLINE
-#if defined(__clang__)
-#define PG_INLINE __inline__ __attribute__((__unused__))
-#elif defined(__GNUC__)
-#define PG_INLINE __inline__
-#elif defined(_MSC_VER)
-#define PG_INLINE __inline
-#elif defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
-#define PG_INLINE inline
-#else
-#define PG_INLINE
-#endif
-#endif
-
-/*last platform compiler stuff*/
-#if defined(macintosh) && defined(__MWERKS__) || defined(__SYMBIAN32__)
-#define PYGAME_EXPORT __declspec(export)
-#else
-#define PYGAME_EXPORT
-#endif
-
-
-#endif /* PYGAME_H */
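
Note: the comment block at the top of the deleted _pygame.h describes how pygame uses Python itself as a runtime linker: an extension initializes its own module first, then calls the matching import_xxx() macro, which copies the exporting module's function-pointer slots into PyGAME_C_API. A minimal sketch of that pattern for a derived extension follows; the module name "myext" and its empty method table are hypothetical, everything else follows the deleted header.

/* myext.c: hypothetical extension built against the deleted _pygame.h. */
#include "_pygame.h"

static PyMethodDef myext_methods[] = {
    {NULL, NULL, 0, NULL} /* sentinel */
};

static struct PyModuleDef myext_module = {
    PyModuleDef_HEAD_INIT, "myext", NULL, -1, myext_methods};

PyMODINIT_FUNC
PyInit_myext(void)
{
    PyObject *module = PyModule_Create(&myext_module);
    if (module == NULL)
        return NULL;

    /* Import after our own init and before any pg_* call; this expands to
     * _IMPORT_PYGAME_MODULE(base, BASE, PyGAME_C_API) and fills the
     * PYGAMEAPI_BASE_* slots. */
    import_pygame_base();
    if (PyErr_Occurred()) {
        Py_DECREF(module);
        return NULL;
    }
    return module;
}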

@@ -1,31 +0,0 @@
-/*
-  pygame - Python Game Library
-  Copyright (C) 2000-2001 Pete Shinners
-  Copyright (C) 2007 Marcus von Appen
-
-  This library is free software; you can redistribute it and/or
-  modify it under the terms of the GNU Library General Public
-  License as published by the Free Software Foundation; either
-  version 2 of the License, or (at your option) any later version.
-
-  This library is distributed in the hope that it will be useful,
-  but WITHOUT ANY WARRANTY; without even the implied warranty of
-  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-  Library General Public License for more details.
-
-  You should have received a copy of the GNU Library General Public
-  License along with this library; if not, write to the Free
-  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
-  Pete Shinners
-  pete@shinners.org
-*/
-
-#ifndef _SURFACE_H
-#define _SURFACE_H
-
-#include "_pygame.h"
-#include "surface.h"
-
-#endif
-

@@ -1,146 +0,0 @@
-/*
-  Bitmask 1.7 - A pixel-perfect collision detection library.
-
-  Copyright (C) 2002-2005 Ulf Ekstrom except for the bitcount
-  function which is copyright (C) Donald W. Gillies, 1992.
-
-  This library is free software; you can redistribute it and/or
-  modify it under the terms of the GNU Library General Public
-  License as published by the Free Software Foundation; either
-  version 2 of the License, or (at your option) any later version.
-
-  This library is distributed in the hope that it will be useful,
-  but WITHOUT ANY WARRANTY; without even the implied warranty of
-  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-  Library General Public License for more details.
-
-  You should have received a copy of the GNU Library General Public
-  License along with this library; if not, write to the Free
-  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-*/
-#ifndef BITMASK_H
-#define BITMASK_H
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-#include <limits.h>
-/* Define INLINE for different compilers. If your compiler does not
-   support inlining then there might be a performance hit in
-   bitmask_overlap_area().
-*/
-#ifndef INLINE
-# ifdef __GNUC__
-#  define INLINE inline
-# else
-#  ifdef _MSC_VER
-#   define INLINE __inline
-#  else
-#   define INLINE
-#  endif
-# endif
-#endif
-
-#define BITMASK_W unsigned long int
-#define BITMASK_W_LEN (sizeof(BITMASK_W)*CHAR_BIT)
-#define BITMASK_W_MASK (BITMASK_W_LEN - 1)
-#define BITMASK_N(n) ((BITMASK_W)1 << (n))
-
-typedef struct bitmask
-{
-    int w,h;
-    BITMASK_W bits[1];
-} bitmask_t;
-
-/* Creates a bitmask of width w and height h, where
-   w and h must both be greater than or equal to 0.
-   The mask is automatically cleared when created.
-*/
-bitmask_t *bitmask_create(int w, int h);
-
-/* Frees all the memory allocated by bitmask_create for m. */
-void bitmask_free(bitmask_t *m);
-
-/* Clears all bits in the mask */
-void bitmask_clear(bitmask_t *m);
-
-/* Sets all bits in the mask */
-void bitmask_fill(bitmask_t *m);
-
-/* Flips all bits in the mask */
-void bitmask_invert(bitmask_t *m);
-
-/* Counts the bits in the mask */
-unsigned int bitmask_count(bitmask_t *m);
-
-/* Returns nonzero if the bit at (x,y) is set. Coordinates start at
-   (0,0) */
-static INLINE int bitmask_getbit(const bitmask_t *m, int x, int y)
-{
-    return (m->bits[x/BITMASK_W_LEN*m->h + y] & BITMASK_N(x & BITMASK_W_MASK)) != 0;
-}
-
-/* Sets the bit at (x,y) */
-static INLINE void bitmask_setbit(bitmask_t *m, int x, int y)
-{
-    m->bits[x/BITMASK_W_LEN*m->h + y] |= BITMASK_N(x & BITMASK_W_MASK);
-}
-
-/* Clears the bit at (x,y) */
-static INLINE void bitmask_clearbit(bitmask_t *m, int x, int y)
-{
-    m->bits[x/BITMASK_W_LEN*m->h + y] &= ~BITMASK_N(x & BITMASK_W_MASK);
-}
-
-/* Returns nonzero if the masks overlap with the given offset.
-   The overlap tests uses the following offsets (which may be negative):
-
-   +----+----------..
-   |A   | yoffset
-   |  +-+----------..
-   +--|B
-      |xoffset
-      |  |
-      :  :
-*/
-int bitmask_overlap(const bitmask_t *a, const bitmask_t *b, int xoffset, int yoffset);
-
-/* Like bitmask_overlap(), but will also give a point of intersection.
-   x and y are given in the coordinates of mask a, and are untouched
-   if there is no overlap. */
-int bitmask_overlap_pos(const bitmask_t *a, const bitmask_t *b,
-                        int xoffset, int yoffset, int *x, int *y);
-
-/* Returns the number of overlapping 'pixels' */
-int bitmask_overlap_area(const bitmask_t *a, const bitmask_t *b, int xoffset, int yoffset);
-
-/* Fills a mask with the overlap of two other masks. A bitwise AND. */
-void bitmask_overlap_mask (const bitmask_t *a, const bitmask_t *b, bitmask_t *c, int xoffset, int yoffset);
-
-/* Draws mask b onto mask a (bitwise OR). Can be used to compose large
-   (game background?) mask from several submasks, which may speed up
-   the testing. */
-
-void bitmask_draw(bitmask_t *a, const bitmask_t *b, int xoffset, int yoffset);
-
-void bitmask_erase(bitmask_t *a, const bitmask_t *b, int xoffset, int yoffset);
-
-/* Return a new scaled bitmask, with dimensions w*h. The quality of the
-   scaling may not be perfect for all circumstances, but it should
-   be reasonable. If either w or h is 0 a clear 1x1 mask is returned. */
-bitmask_t *bitmask_scale(const bitmask_t *m, int w, int h);
-
-/* Convolve b into a, drawing the output into o, shifted by offset. If offset
- * is 0, then the (x,y) bit will be set if and only if
- * bitmask_overlap(a, b, x - b->w - 1, y - b->h - 1) returns true.
- *
- * Modifies bits o[xoffset ... xoffset + a->w + b->w - 1)
- *               [yoffset ... yoffset + a->h + b->h - 1). */
-void bitmask_convolve(const bitmask_t *a, const bitmask_t *b, bitmask_t *o, int xoffset, int yoffset);
-
-#ifdef __cplusplus
-} /* End of extern "C" { */
-#endif
-
-#endif
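
Note: the comments in the deleted bitmask.h double as API documentation. A short sketch of the documented lifecycle, using only functions declared above: create a cleared mask, set bits, test overlap at an offset, free.

#include <stdio.h>
#include "bitmask.h"

int main(void)
{
    bitmask_t *a = bitmask_create(8, 8); /* masks are created cleared */
    bitmask_t *b = bitmask_create(8, 8);

    bitmask_setbit(a, 4, 4);
    bitmask_setbit(b, 2, 2);

    /* Shifting b by (2, 2) moves its set bit onto a's (4, 4), so the
     * masks overlap; bitmask_overlap_area() would report 1 pixel. */
    if (bitmask_overlap(a, b, 2, 2))
        printf("masks collide\n");

    bitmask_free(a);
    bitmask_free(b);
    return 0;
}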

@@ -1,201 +0,0 @@
-/*
-  pygame - Python Game Library
-
-  This library is free software; you can redistribute it and/or
-  modify it under the terms of the GNU Library General Public
-  License as published by the Free Software Foundation; either
-  version 2 of the License, or (at your option) any later version.
-
-  This library is distributed in the hope that it will be useful,
-  but WITHOUT ANY WARRANTY; without even the implied warranty of
-  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-  Library General Public License for more details.
-
-  You should have received a copy of the GNU Library General Public
-  License along with this library; if not, write to the Free
-  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
-*/
-
-#include "pygame.h"
-#include "doc/camera_doc.h"
-
-#if defined(__unix__)
-#include <structmember.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <assert.h>
-
-#include <fcntl.h> /* low-level i/o */
-#include <unistd.h>
-#include <errno.h>
-#include <sys/stat.h>
-#include <sys/types.h>
-#include <sys/time.h>
-#include <sys/mman.h>
-#include <sys/ioctl.h>
-
-/* on freebsd there is no asm/types */
-#ifdef linux
-#include <asm/types.h> /* for videodev2.h */
-#endif
-
-#include <linux/videodev2.h>
-#elif defined(__APPLE__)
-#include <AvailabilityMacros.h>
-/* We support OSX 10.6 and below. */
-#if __MAC_OS_X_VERSION_MAX_ALLOWED <= 1060
-#define PYGAME_MAC_CAMERA_OLD 1
-#endif
-#endif
-
-#if defined(PYGAME_MAC_CAMERA_OLD)
-#include <QuickTime/QuickTime.h>
-#include <QuickTime/Movies.h>
-#include <QuickTime/ImageCompression.h>
-#endif
-
-/* some constants used which are not defined on non-v4l machines. */
-#ifndef V4L2_PIX_FMT_RGB24
-#define V4L2_PIX_FMT_RGB24 'RGB3'
-#endif
-#ifndef V4L2_PIX_FMT_RGB444
-#define V4L2_PIX_FMT_RGB444 'R444'
-#endif
-#ifndef V4L2_PIX_FMT_YUYV
-#define V4L2_PIX_FMT_YUYV 'YUYV'
-#endif
-
-#define CLEAR(x) memset (&(x), 0, sizeof (x))
-#define SAT(c) if (c & (~255)) { if (c < 0) c = 0; else c = 255; }
-#define SAT2(c) ((c) & (~255) ? ((c) < 0 ? 0 : 255) : (c))
-#define DEFAULT_WIDTH 640
-#define DEFAULT_HEIGHT 480
-#define RGB_OUT 1
-#define YUV_OUT 2
-#define HSV_OUT 4
-#define CAM_V4L 1 /* deprecated. the incomplete support in pygame was removed */
-#define CAM_V4L2 2
-
-struct buffer {
-    void * start;
-    size_t length;
-};
-
-#if defined(__unix__)
-typedef struct pgCameraObject {
-    PyObject_HEAD
-    char* device_name;
-    int camera_type;
-    unsigned long pixelformat;
-    unsigned int color_out;
-    struct buffer* buffers;
-    unsigned int n_buffers;
-    int width;
-    int height;
-    int size;
-    int hflip;
-    int vflip;
-    int brightness;
-    int fd;
-} pgCameraObject;
-#elif defined(PYGAME_MAC_CAMERA_OLD)
-typedef struct pgCameraObject {
-    PyObject_HEAD
-    char* device_name;          /* unieke name of the device */
-    OSType pixelformat;
-    unsigned int color_out;
-    SeqGrabComponent component; /* A type used by the Sequence Grabber API */
-    SGChannel channel;          /* Channel of the Sequence Grabber */
-    GWorldPtr gworld;           /* Pointer to the struct that holds the data of the captured image */
-    Rect boundsRect;            /* bounds of the image frame */
-    long size;                  /* size of the image in our buffer to draw */
-    int hflip;
-    int vflip;
-    short depth;
-    struct buffer pixels;
-    //struct buffer tmp_pixels /* place where the flipped image in temporarly stored if hflip or vflip is true.*/
-} pgCameraObject;

-#else
-/* generic definition.
-*/
-
-typedef struct pgCameraObject {
-    PyObject_HEAD
-    char* device_name;
-    int camera_type;
-    unsigned long pixelformat;
-    unsigned int color_out;
-    struct buffer* buffers;
-    unsigned int n_buffers;
-    int width;
-    int height;
-    int size;
-    int hflip;
-    int vflip;
-    int brightness;
-    int fd;
-} pgCameraObject;
-#endif
-
-/* internal functions for colorspace conversion */
-void colorspace (SDL_Surface *src, SDL_Surface *dst, int cspace);
-void rgb24_to_rgb (const void* src, void* dst, int length, SDL_PixelFormat* format);
-void rgb444_to_rgb (const void* src, void* dst, int length, SDL_PixelFormat* format);
-void rgb_to_yuv (const void* src, void* dst, int length,
-                 unsigned long source, SDL_PixelFormat* format);
-void rgb_to_hsv (const void* src, void* dst, int length,
-                 unsigned long source, SDL_PixelFormat* format);
-void yuyv_to_rgb (const void* src, void* dst, int length, SDL_PixelFormat* format);
-void yuyv_to_yuv (const void* src, void* dst, int length, SDL_PixelFormat* format);
-void uyvy_to_rgb (const void* src, void* dst, int length, SDL_PixelFormat* format);
-void uyvy_to_yuv (const void* src, void* dst, int length, SDL_PixelFormat* format);
-void sbggr8_to_rgb (const void* src, void* dst, int width, int height,
-                    SDL_PixelFormat* format);
-void yuv420_to_rgb (const void* src, void* dst, int width, int height,
-                    SDL_PixelFormat* format);
-void yuv420_to_yuv (const void* src, void* dst, int width, int height,
-                    SDL_PixelFormat* format);
-
-#if defined(__unix__)
-/* internal functions specific to v4l2 */
-char** v4l2_list_cameras (int* num_devices);
-int v4l2_get_control (int fd, int id, int *value);
-int v4l2_set_control (int fd, int id, int value);
-PyObject* v4l2_read_raw (pgCameraObject* self);
-int v4l2_xioctl (int fd, int request, void *arg);
-int v4l2_process_image (pgCameraObject* self, const void *image,
-                        unsigned int buffer_size, SDL_Surface* surf);
-int v4l2_query_buffer (pgCameraObject* self);
-int v4l2_read_frame (pgCameraObject* self, SDL_Surface* surf);
-int v4l2_stop_capturing (pgCameraObject* self);
-int v4l2_start_capturing (pgCameraObject* self);
-int v4l2_uninit_device (pgCameraObject* self);
-int v4l2_init_mmap (pgCameraObject* self);
-int v4l2_init_device (pgCameraObject* self);
-int v4l2_close_device (pgCameraObject* self);
-int v4l2_open_device (pgCameraObject* self);
-
-#elif defined(PYGAME_MAC_CAMERA_OLD)
-/* internal functions specific to mac */
-char** mac_list_cameras(int* num_devices);
-int mac_open_device (pgCameraObject* self);
-int mac_init_device(pgCameraObject* self);
-int mac_close_device (pgCameraObject* self);
-int mac_start_capturing(pgCameraObject* self);
-int mac_stop_capturing (pgCameraObject* self);
-
-int mac_get_control(pgCameraObject* self, int id, int* value);
-int mac_set_control(pgCameraObject* self, int id, int value);
-
-PyObject* mac_read_raw(pgCameraObject *self);
-int mac_read_frame(pgCameraObject* self, SDL_Surface* surf);
-int mac_camera_idle(pgCameraObject* self);
-int mac_copy_gworld_to_surface(pgCameraObject* self, SDL_Surface* surf);
-
-void flip_image(const void* image, void* flipped_image, int width, int height,
-                short depth, int hflip, int vflip);
-
-#endif
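
Note: for reference, a small sketch against the v4l2 helpers declared above, limited to device enumeration because the capture entry points all expect a fully constructed pgCameraObject. The header does not document who owns the returned array, so this sketch only reads it; treat the freeing behavior as unspecified.

#include <stdio.h>
#include "camera.h"

void print_cameras(void)
{
    int num_devices = 0;
    char **devices = v4l2_list_cameras(&num_devices);

    /* devices[i] is a V4L2 device name such as /dev/video0; ownership
     * of the array is not documented in the header above. */
    for (int i = 0; i < num_devices; i++)
        printf("camera %d: %s\n", i, devices[i]);
}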
@ -1,48 +0,0 @@
#ifndef _FASTEVENTS_H_
#define _FASTEVENTS_H_
/*
   NET2 is a threaded, event based, network IO library for SDL.
   Copyright (C) 2002 Bob Pendleton

   This library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public License
   as published by the Free Software Foundation; either version 2.1
   of the License, or (at your option) any later version.

   This library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with this library; if not, write to the Free
   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307 USA

   If you do not wish to comply with the terms of the LGPL please
   contact the author as other terms are available for a fee.

   Bob Pendleton
   Bob@Pendleton.com
*/

#include "SDL.h"

#ifdef __cplusplus
extern "C" {
#endif

int FE_Init(void);                  // Initialize FE
void FE_Quit(void);                 // shutdown FE

void FE_PumpEvents(void);           // replacement for SDL_PumpEvents
int FE_PollEvent(SDL_Event *event); // replacement for SDL_PollEvent
int FE_WaitEvent(SDL_Event *event); // replacement for SDL_WaitEvent
int FE_PushEvent(SDL_Event *event); // replacement for SDL_PushEvent

char *FE_GetError(void);            // get the last error

#ifdef __cplusplus
}
#endif

#endif
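
The FE_* calls above are drop-in replacements for the corresponding SDL event functions. A minimal sketch of a main loop built on them might look like the following; the header's install name and the 0-on-success / 1-per-event return conventions are assumptions, not stated in the declarations themselves:

/* minimal sketch, assuming SDL-style return conventions */
#include <stdio.h>
#include "SDL.h"
#include "fastevents.h"   /* assumed install name of this header */

int main(int argc, char *argv[])
{
    SDL_Event event;
    int running = 1;

    if (SDL_Init(SDL_INIT_VIDEO) < 0 || FE_Init() < 0) {
        fprintf(stderr, "init failed: %s\n", FE_GetError());
        return 1;
    }
    /* FE_WaitEvent blocks like SDL_WaitEvent, assumed to return 1 when
       an event was delivered */
    while (running && FE_WaitEvent(&event)) {
        if (event.type == SDL_QUIT)
            running = 0;
    }
    FE_Quit();
    SDL_Quit();
    return 0;
}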
@ -1,57 +0,0 @@
/*
    pygame - Python Game Library
    Copyright (C) 2000-2001 Pete Shinners

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Library General Public
    License as published by the Free Software Foundation; either
    version 2 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    Library General Public License for more details.

    You should have received a copy of the GNU Library General Public
    License along with this library; if not, write to the Free Software
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

    Pete Shinners
    pete@shinners.org
*/

#include <Python.h>
#if defined(HAVE_SNPRINTF)  /* also defined in SDL_ttf (SDL.h) */
#undef HAVE_SNPRINTF        /* remove GCC macro redefine warning */
#endif
#include <SDL_ttf.h>

/* test font initialization */
#define FONT_INIT_CHECK() \
    if(!(*(int*)PyFONT_C_API[2])) \
        return RAISE(pgExc_SDLError, "font system not initialized")

#define PYGAMEAPI_FONT_FIRSTSLOT 0
#define PYGAMEAPI_FONT_NUMSLOTS 3
typedef struct {
    PyObject_HEAD
    TTF_Font* font;
    PyObject* weakreflist;
} PyFontObject;
#define PyFont_AsFont(x) (((PyFontObject*)x)->font)

#ifndef PYGAMEAPI_FONT_INTERNAL
#define PyFont_Check(x) ((x)->ob_type == (PyTypeObject*)PyFONT_C_API[0])
#define PyFont_Type (*(PyTypeObject*)PyFONT_C_API[0])
#define PyFont_New (*(PyObject*(*)(TTF_Font*))PyFONT_C_API[1])
/* slot 2 taken by FONT_INIT_CHECK */

#define import_pygame_font() \
    _IMPORT_PYGAME_MODULE(font, FONT, PyFONT_C_API)

static void* PyFONT_C_API[PYGAMEAPI_FONT_NUMSLOTS] = {NULL};
#endif
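
The slot-table pattern above (a static PyFONT_C_API array filled in by import_pygame_font()) is how other extension modules consume this C API. A hypothetical consumer function, sketched under the assumption that pygame's RAISE macro and this header are available:

/* hypothetical consumer; import_pygame_font() must already have been
   called in the module init (check PyErr_Occurred() afterwards) */
static PyObject *
font_height(PyObject *self, PyObject *arg)
{
    FONT_INIT_CHECK();            /* slot 2 holds the "initialized" flag */
    if (!PyFont_Check(arg))
        return RAISE(PyExc_TypeError, "argument must be a pygame Font");
    /* unwrap the TTF_Font* and call straight into SDL_ttf */
    return PyLong_FromLong(TTF_FontHeight(PyFont_AsFont(arg)));
}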
@ -1,137 +0,0 @@
/*
    pygame - Python Game Library
    Copyright (C) 2009 Vicent Marti

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Library General Public
    License as published by the Free Software Foundation; either
    version 2 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    Library General Public License for more details.

    You should have received a copy of the GNU Library General Public
    License along with this library; if not, write to the Free Software
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

*/
#ifndef _PYGAME_FREETYPE_H_
#define _PYGAME_FREETYPE_H_

#define PGFT_PYGAME1_COMPAT
#define HAVE_PYGAME_SDL_VIDEO
#define HAVE_PYGAME_SDL_RWOPS

#include "pygame.h"
#include "pgcompat.h"

#if PY3
# define IS_PYTHON_3
#endif

#include <ft2build.h>
#include FT_FREETYPE_H
#include FT_CACHE_H
#include FT_XFREE86_H
#include FT_TRIGONOMETRY_H

/**********************************************************
 * Global module constants
 **********************************************************/

/* Render styles */
#define FT_STYLE_NORMAL     0x00
#define FT_STYLE_STRONG     0x01
#define FT_STYLE_OBLIQUE    0x02
#define FT_STYLE_UNDERLINE  0x04
#define FT_STYLE_WIDE       0x08
#define FT_STYLE_DEFAULT    0xFF

/* Bounding box modes */
#define FT_BBOX_EXACT           FT_GLYPH_BBOX_SUBPIXELS
#define FT_BBOX_EXACT_GRIDFIT   FT_GLYPH_BBOX_GRIDFIT
#define FT_BBOX_PIXEL           FT_GLYPH_BBOX_TRUNCATE
#define FT_BBOX_PIXEL_GRIDFIT   FT_GLYPH_BBOX_PIXELS

/* Rendering flags */
#define FT_RFLAG_NONE                (0)
#define FT_RFLAG_ANTIALIAS           (1 << 0)
#define FT_RFLAG_AUTOHINT            (1 << 1)
#define FT_RFLAG_VERTICAL            (1 << 2)
#define FT_RFLAG_HINTED              (1 << 3)
#define FT_RFLAG_KERNING             (1 << 4)
#define FT_RFLAG_TRANSFORM           (1 << 5)
#define FT_RFLAG_PAD                 (1 << 6)
#define FT_RFLAG_ORIGIN              (1 << 7)
#define FT_RFLAG_UCS4                (1 << 8)
#define FT_RFLAG_USE_BITMAP_STRIKES  (1 << 9)
#define FT_RFLAG_DEFAULTS            (FT_RFLAG_HINTED | \
                                      FT_RFLAG_USE_BITMAP_STRIKES | \
                                      FT_RFLAG_ANTIALIAS)

#define FT_RENDER_NEWBYTEARRAY      0x0
#define FT_RENDER_NEWSURFACE        0x1
#define FT_RENDER_EXISTINGSURFACE   0x2

/**********************************************************
 * Global module types
 **********************************************************/

typedef struct _scale_s {
    FT_UInt x, y;
} Scale_t;
typedef FT_Angle Angle_t;

struct fontinternals_;
struct freetypeinstance_;

typedef struct {
    FT_Long font_index;
    FT_Open_Args open_args;
} pgFontId;

typedef struct {
    PyObject_HEAD
    pgFontId id;
    PyObject *path;
    int is_scalable;

    Scale_t face_size;
    FT_Int16 style;
    FT_Int16 render_flags;
    double strength;
    double underline_adjustment;
    FT_UInt resolution;
    Angle_t rotation;
    FT_Matrix transform;
    FT_Byte fgcolor[4];

    struct freetypeinstance_ *freetype; /* Personal reference */
    struct fontinternals_ *_internals;
} pgFontObject;

#define pgFont_IS_ALIVE(o) \
    (((pgFontObject *)(o))->_internals != 0)

/**********************************************************
 * Module declaration
 **********************************************************/
#define PYGAMEAPI_FREETYPE_FIRSTSLOT 0
#define PYGAMEAPI_FREETYPE_NUMSLOTS 2

#ifndef PYGAME_FREETYPE_INTERNAL

#define pgFont_Check(x) ((x)->ob_type == (PyTypeObject*)PgFREETYPE_C_API[0])
#define pgFont_Type (*(PyTypeObject*)PgFREETYPE_C_API[0])
#define pgFont_New (*(PyObject*(*)(const char*, long))PgFREETYPE_C_API[1])

#define import_pygame_freetype() \
    _IMPORT_PYGAME_MODULE(freetype, FREETYPE, PgFREETYPE_C_API)

static void *PgFREETYPE_C_API[PYGAMEAPI_FREETYPE_NUMSLOTS] = {0};
#endif /* PYGAME_FREETYPE_INTERNAL */

#endif /* _PYGAME_FREETYPE_H_ */
@ -1,25 +0,0 @@
#include <Python.h>
#include "bitmask.h"

#define PYGAMEAPI_MASK_FIRSTSLOT 0
#define PYGAMEAPI_MASK_NUMSLOTS 1
#define PYGAMEAPI_LOCAL_ENTRY "_PYGAME_C_API"

typedef struct {
    PyObject_HEAD
    bitmask_t *mask;
} pgMaskObject;

#define pgMask_AsBitmap(x) (((pgMaskObject*)x)->mask)

#ifndef PYGAMEAPI_MASK_INTERNAL

#define pgMask_Type (*(PyTypeObject*)PyMASK_C_API[0])
#define pgMask_Check(x) ((x)->ob_type == &pgMask_Type)

#define import_pygame_mask() \
    _IMPORT_PYGAME_MODULE(mask, MASK, PyMASK_C_API)

static void* PyMASK_C_API[PYGAMEAPI_MASK_NUMSLOTS] = {NULL};
#endif /* #ifndef PYGAMEAPI_MASK_INTERNAL */
@ -1,65 +0,0 @@
/*
    pygame - Python Game Library
    Copyright (C) 2000-2001 Pete Shinners

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Library General Public
    License as published by the Free Software Foundation; either
    version 2 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    Library General Public License for more details.

    You should have received a copy of the GNU Library General Public
    License along with this library; if not, write to the Free Software
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

    Pete Shinners
    pete@shinners.org
*/

#include <Python.h>
#include <SDL_mixer.h>
#include <structmember.h>

/* test mixer initialization */
#define MIXER_INIT_CHECK() \
    if(!SDL_WasInit(SDL_INIT_AUDIO)) \
        return RAISE(pgExc_SDLError, "mixer not initialized")

#define PYGAMEAPI_MIXER_FIRSTSLOT 0
#define PYGAMEAPI_MIXER_NUMSLOTS 7
typedef struct {
    PyObject_HEAD
    Mix_Chunk *chunk;
    Uint8 *mem;
    PyObject *weakreflist;
} pgSoundObject;
typedef struct {
    PyObject_HEAD
    int chan;
} pgChannelObject;
#define pgSound_AsChunk(x) (((pgSoundObject*)x)->chunk)
#define pgChannel_AsInt(x) (((pgChannelObject*)x)->chan)

#ifndef PYGAMEAPI_MIXER_INTERNAL
#define pgSound_Check(x) ((x)->ob_type == (PyTypeObject*)pgMIXER_C_API[0])
#define pgSound_Type (*(PyTypeObject*)pgMIXER_C_API[0])
#define pgSound_New (*(PyObject*(*)(Mix_Chunk*))pgMIXER_C_API[1])
#define pgSound_Play (*(PyObject*(*)(PyObject*, PyObject*))pgMIXER_C_API[2])
#define pgChannel_Check(x) ((x)->ob_type == (PyTypeObject*)pgMIXER_C_API[3])
#define pgChannel_Type (*(PyTypeObject*)pgMIXER_C_API[3])
#define pgChannel_New (*(PyObject*(*)(int))pgMIXER_C_API[4])
#define pgMixer_AutoInit (*(PyObject*(*)(PyObject*, PyObject*))pgMIXER_C_API[5])
#define pgMixer_AutoQuit (*(void(*)(void))pgMIXER_C_API[6])

#define import_pygame_mixer() \
    _IMPORT_PYGAME_MODULE(mixer, MIXER, pgMIXER_C_API)

static void* pgMIXER_C_API[PYGAMEAPI_MIXER_NUMSLOTS] = {NULL};
#endif
@ -1,123 +0,0 @@
/*
    pygame - Python Game Library
    Copyright (C) 2000-2001 Pete Shinners

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Library General Public
    License as published by the Free Software Foundation; either
    version 2 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    Library General Public License for more details.

    You should have received a copy of the GNU Library General Public
    License along with this library; if not, write to the Free Software
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

    Pete Shinners
    pete@shinners.org
*/

#ifndef PALETTE_H
#define PALETTE_H

#include <SDL.h>

/* SDL 2 does not assign a default palette color scheme to a new 8 bit
 * surface. Instead, the palette is set to all white. This defines the
 * SDL 1.2 default palette.
 */
static const SDL_Color default_palette_colors[] = {
    {0, 0, 0, 255},       {0, 0, 85, 255},      {0, 0, 170, 255},
    {0, 0, 255, 255},     {0, 36, 0, 255},      {0, 36, 85, 255},
    {0, 36, 170, 255},    {0, 36, 255, 255},    {0, 73, 0, 255},
    {0, 73, 85, 255},     {0, 73, 170, 255},    {0, 73, 255, 255},
    {0, 109, 0, 255},     {0, 109, 85, 255},    {0, 109, 170, 255},
    {0, 109, 255, 255},   {0, 146, 0, 255},     {0, 146, 85, 255},
    {0, 146, 170, 255},   {0, 146, 255, 255},   {0, 182, 0, 255},
    {0, 182, 85, 255},    {0, 182, 170, 255},   {0, 182, 255, 255},
    {0, 219, 0, 255},     {0, 219, 85, 255},    {0, 219, 170, 255},
    {0, 219, 255, 255},   {0, 255, 0, 255},     {0, 255, 85, 255},
    {0, 255, 170, 255},   {0, 255, 255, 255},   {85, 0, 0, 255},
    {85, 0, 85, 255},     {85, 0, 170, 255},    {85, 0, 255, 255},
    {85, 36, 0, 255},     {85, 36, 85, 255},    {85, 36, 170, 255},
    {85, 36, 255, 255},   {85, 73, 0, 255},     {85, 73, 85, 255},
    {85, 73, 170, 255},   {85, 73, 255, 255},   {85, 109, 0, 255},
    {85, 109, 85, 255},   {85, 109, 170, 255},  {85, 109, 255, 255},
    {85, 146, 0, 255},    {85, 146, 85, 255},   {85, 146, 170, 255},
    {85, 146, 255, 255},  {85, 182, 0, 255},    {85, 182, 85, 255},
    {85, 182, 170, 255},  {85, 182, 255, 255},  {85, 219, 0, 255},
    {85, 219, 85, 255},   {85, 219, 170, 255},  {85, 219, 255, 255},
    {85, 255, 0, 255},    {85, 255, 85, 255},   {85, 255, 170, 255},
    {85, 255, 255, 255},  {170, 0, 0, 255},     {170, 0, 85, 255},
    {170, 0, 170, 255},   {170, 0, 255, 255},   {170, 36, 0, 255},
    {170, 36, 85, 255},   {170, 36, 170, 255},  {170, 36, 255, 255},
    {170, 73, 0, 255},    {170, 73, 85, 255},   {170, 73, 170, 255},
    {170, 73, 255, 255},  {170, 109, 0, 255},   {170, 109, 85, 255},
    {170, 109, 170, 255}, {170, 109, 255, 255}, {170, 146, 0, 255},
    {170, 146, 85, 255},  {170, 146, 170, 255}, {170, 146, 255, 255},
    {170, 182, 0, 255},   {170, 182, 85, 255},  {170, 182, 170, 255},
    {170, 182, 255, 255}, {170, 219, 0, 255},   {170, 219, 85, 255},
    {170, 219, 170, 255}, {170, 219, 255, 255}, {170, 255, 0, 255},
    {170, 255, 85, 255},  {170, 255, 170, 255}, {170, 255, 255, 255},
    {255, 0, 0, 255},     {255, 0, 85, 255},    {255, 0, 170, 255},
    {255, 0, 255, 255},   {255, 36, 0, 255},    {255, 36, 85, 255},
    {255, 36, 170, 255},  {255, 36, 255, 255},  {255, 73, 0, 255},
    {255, 73, 85, 255},   {255, 73, 170, 255},  {255, 73, 255, 255},
    {255, 109, 0, 255},   {255, 109, 85, 255},  {255, 109, 170, 255},
    {255, 109, 255, 255}, {255, 146, 0, 255},   {255, 146, 85, 255},
    {255, 146, 170, 255}, {255, 146, 255, 255}, {255, 182, 0, 255},
    {255, 182, 85, 255},  {255, 182, 170, 255}, {255, 182, 255, 255},
    {255, 219, 0, 255},   {255, 219, 85, 255},  {255, 219, 170, 255},
    {255, 219, 255, 255}, {255, 255, 0, 255},   {255, 255, 85, 255},
    {255, 255, 170, 255}, {255, 255, 255, 255}, {0, 0, 0, 255},
    {0, 0, 85, 255},      {0, 0, 170, 255},     {0, 0, 255, 255},
    {0, 36, 0, 255},      {0, 36, 85, 255},     {0, 36, 170, 255},
    {0, 36, 255, 255},    {0, 73, 0, 255},      {0, 73, 85, 255},
    {0, 73, 170, 255},    {0, 73, 255, 255},    {0, 109, 0, 255},
    {0, 109, 85, 255},    {0, 109, 170, 255},   {0, 109, 255, 255},
    {0, 146, 0, 255},     {0, 146, 85, 255},    {0, 146, 170, 255},
    {0, 146, 255, 255},   {0, 182, 0, 255},     {0, 182, 85, 255},
    {0, 182, 170, 255},   {0, 182, 255, 255},   {0, 219, 0, 255},
    {0, 219, 85, 255},    {0, 219, 170, 255},   {0, 219, 255, 255},
    {0, 255, 0, 255},     {0, 255, 85, 255},    {0, 255, 170, 255},
    {0, 255, 255, 255},   {85, 0, 0, 255},      {85, 0, 85, 255},
    {85, 0, 170, 255},    {85, 0, 255, 255},    {85, 36, 0, 255},
    {85, 36, 85, 255},    {85, 36, 170, 255},   {85, 36, 255, 255},
    {85, 73, 0, 255},     {85, 73, 85, 255},    {85, 73, 170, 255},
    {85, 73, 255, 255},   {85, 109, 0, 255},    {85, 109, 85, 255},
    {85, 109, 170, 255},  {85, 109, 255, 255},  {85, 146, 0, 255},
    {85, 146, 85, 255},   {85, 146, 170, 255},  {85, 146, 255, 255},
    {85, 182, 0, 255},    {85, 182, 85, 255},   {85, 182, 170, 255},
    {85, 182, 255, 255},  {85, 219, 0, 255},    {85, 219, 85, 255},
    {85, 219, 170, 255},  {85, 219, 255, 255},  {85, 255, 0, 255},
    {85, 255, 85, 255},   {85, 255, 170, 255},  {85, 255, 255, 255},
    {170, 0, 0, 255},     {170, 0, 85, 255},    {170, 0, 170, 255},
    {170, 0, 255, 255},   {170, 36, 0, 255},    {170, 36, 85, 255},
    {170, 36, 170, 255},  {170, 36, 255, 255},  {170, 73, 0, 255},
    {170, 73, 85, 255},   {170, 73, 170, 255},  {170, 73, 255, 255},
    {170, 109, 0, 255},   {170, 109, 85, 255},  {170, 109, 170, 255},
    {170, 109, 255, 255}, {170, 146, 0, 255},   {170, 146, 85, 255},
    {170, 146, 170, 255}, {170, 146, 255, 255}, {170, 182, 0, 255},
    {170, 182, 85, 255},  {170, 182, 170, 255}, {170, 182, 255, 255},
    {170, 219, 0, 255},   {170, 219, 85, 255},  {170, 219, 170, 255},
    {170, 219, 255, 255}, {170, 255, 0, 255},   {170, 255, 85, 255},
    {170, 255, 170, 255}, {170, 255, 255, 255}, {255, 0, 0, 255},
    {255, 0, 85, 255},    {255, 0, 170, 255},   {255, 0, 255, 255},
    {255, 36, 0, 255},    {255, 36, 85, 255},   {255, 36, 170, 255},
    {255, 36, 255, 255},  {255, 73, 0, 255},    {255, 73, 85, 255},
    {255, 73, 170, 255},  {255, 73, 255, 255},  {255, 109, 0, 255},
    {255, 109, 85, 255},  {255, 109, 170, 255}, {255, 109, 255, 255},
    {255, 146, 0, 255},   {255, 146, 85, 255},  {255, 146, 170, 255},
    {255, 146, 255, 255}, {255, 182, 0, 255},   {255, 182, 85, 255},
    {255, 182, 170, 255}, {255, 182, 255, 255}, {255, 219, 0, 255},
    {255, 219, 85, 255},  {255, 219, 170, 255}, {255, 219, 255, 255},
    {255, 255, 0, 255},   {255, 255, 85, 255},  {255, 255, 170, 255},
    {255, 255, 255, 255}};

static const int default_palette_size =
    (int)(sizeof(default_palette_colors) / sizeof(SDL_Color));

#endif
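
A sketch of how this table is meant to be used under SDL 2: after creating an 8-bit surface, install the SDL 1.2 default colors so the palette is not all white. The helper name is hypothetical; the SDL calls are the standard SDL 2 API:

/* minimal sketch, assuming this header is available as palette.h */
#include <SDL.h>

SDL_Surface *
create_8bit_surface_with_default_palette(int w, int h)
{
    SDL_Surface *surf = SDL_CreateRGBSurface(0, w, h, 8, 0, 0, 0, 0);
    if (surf == NULL)
        return NULL;
    /* copy all entries of the table, starting at palette index 0 */
    if (SDL_SetPaletteColors(surf->format->palette, default_palette_colors,
                             0, default_palette_size) != 0) {
        SDL_FreeSurface(surf);
        return NULL;
    }
    return surf;
}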
@ -1,26 +0,0 @@
/* array structure interface version 3 declarations */

#if !defined(PG_ARRAYINTER_HEADER)
#define PG_ARRAYINTER_HEADER

static const int PAI_CONTIGUOUS = 0x01;
static const int PAI_FORTRAN = 0x02;
static const int PAI_ALIGNED = 0x100;
static const int PAI_NOTSWAPPED = 0x200;
static const int PAI_WRITEABLE = 0x400;
static const int PAI_ARR_HAS_DESCR = 0x800;

typedef struct {
    int two;              /* contains the integer 2 -- simple sanity check */
    int nd;               /* number of dimensions */
    char typekind;        /* kind in array -- character code of typestr */
    int itemsize;         /* size of each element */
    int flags;            /* flags indicating how the data should be interpreted */
    Py_intptr_t *shape;   /* A length-nd array of shape information */
    Py_intptr_t *strides; /* A length-nd array of stride information */
    void *data;           /* A pointer to the first element of the array */
    PyObject *descr;      /* NULL or a data-description */
} PyArrayInterface;

#endif
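
Since the struct documents its own sanity check (two must contain 2), a consumer can validate a pointer and walk the shape array directly. A hypothetical helper, not part of pygame:

/* sketch of a version-3 array interface consumer */
static Py_ssize_t
array_interface_element_count(PyArrayInterface *inter)
{
    Py_ssize_t count = 1;
    int i;

    if (inter == NULL || inter->two != 2)  /* the documented sanity check */
        return -1;
    for (i = 0; i < inter->nd; ++i)
        count *= (Py_ssize_t)inter->shape[i];
    return count;
}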
@ -1,52 +0,0 @@
/*
    pygame - Python Game Library
    Copyright (C) 2000-2001 Pete Shinners
    Copyright (C) 2007 Rene Dudfield, Richard Goedeken

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Library General Public
    License as published by the Free Software Foundation; either
    version 2 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    Library General Public License for more details.

    You should have received a copy of the GNU Library General Public
    License along with this library; if not, write to the Free Software
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

    Pete Shinners
    pete@shinners.org
*/

/* Bufferproxy module C API.
   Depends on pygame.h being included first.
 */
#if !defined(PG_BUFPROXY_HEADER)

#define PYGAMEAPI_BUFPROXY_NUMSLOTS 4
#define PYGAMEAPI_BUFPROXY_FIRSTSLOT 0

#if !(defined(PYGAMEAPI_BUFPROXY_INTERNAL) || defined(NO_PYGAME_C_API))
static void *PgBUFPROXY_C_API[PYGAMEAPI_BUFPROXY_NUMSLOTS];

typedef PyObject *(*_pgbufproxy_new_t)(PyObject *, getbufferproc);
typedef PyObject *(*_pgbufproxy_get_obj_t)(PyObject *);
typedef int (*_pgbufproxy_trip_t)(PyObject *);

#define pgBufproxy_Type (*(PyTypeObject*)PgBUFPROXY_C_API[0])
#define pgBufproxy_New (*(_pgbufproxy_new_t)PgBUFPROXY_C_API[1])
#define pgBufproxy_GetParent \
    (*(_pgbufproxy_get_obj_t)PgBUFPROXY_C_API[2])
#define pgBufproxy_Trip (*(_pgbufproxy_trip_t)PgBUFPROXY_C_API[3])
#define pgBufproxy_Check(x) ((x)->ob_type == &pgBufproxy_Type)
#define import_pygame_bufferproxy() \
    _IMPORT_PYGAME_MODULE(bufferproxy, BUFPROXY, PgBUFPROXY_C_API)

#endif /* #if !(defined(PYGAMEAPI_BUFPROXY_INTERNAL) || ... */

#define PG_BUFPROXY_HEADER

#endif /* #if !defined(PG_BUFPROXY_HEADER) */
@ -1,195 +0,0 @@
/* Python 2.x/3.x compatibility tools
 */

#if !defined(PGCOMPAT_H)
#define PGCOMPAT_H

#if PY_MAJOR_VERSION >= 3

#define PY3 1

/* Define some aliases for the removed PyInt_* functions */
#define PyInt_Check(op) PyLong_Check(op)
#define PyInt_FromString PyLong_FromString
#define PyInt_FromUnicode PyLong_FromUnicode
#define PyInt_FromLong PyLong_FromLong
#define PyInt_FromSize_t PyLong_FromSize_t
#define PyInt_FromSsize_t PyLong_FromSsize_t
#define PyInt_AsLong PyLong_AsLong
#define PyInt_AsSsize_t PyLong_AsSsize_t
#define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask
#define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
#define PyInt_AS_LONG PyLong_AS_LONG
#define PyNumber_Int PyNumber_Long

/* Weakrefs flags changed in 3.x */
#define Py_TPFLAGS_HAVE_WEAKREFS 0

/* Module init function returns new module instance. */
#define MODINIT_RETURN(x) return x
#define MODINIT_DEFINE(mod_name) PyMODINIT_FUNC PyInit_##mod_name (void)
#define DECREF_MOD(mod) Py_DECREF (mod)

/* Type header differs. */
#define TYPE_HEAD(x,y) PyVarObject_HEAD_INIT(x,y)

/* Text interface. Use unicode strings. */
#define Text_Type PyUnicode_Type
#define Text_Check PyUnicode_Check

#ifndef PYPY_VERSION
#define Text_FromLocale(s) PyUnicode_DecodeLocale((s), "strict")
#else /* PYPY_VERSION */
/* workaround: missing function for pypy */
#define Text_FromLocale PyUnicode_FromString
#endif /* PYPY_VERSION */

#define Text_FromUTF8 PyUnicode_FromString
#define Text_FromUTF8AndSize PyUnicode_FromStringAndSize
#define Text_FromFormat PyUnicode_FromFormat
#define Text_GetSize PyUnicode_GetSize
#define Text_GET_SIZE PyUnicode_GET_SIZE

/* Binary interface. Use bytes. */
#define Bytes_Type PyBytes_Type
#define Bytes_Check PyBytes_Check
#define Bytes_Size PyBytes_Size
#define Bytes_AsString PyBytes_AsString
#define Bytes_AsStringAndSize PyBytes_AsStringAndSize
#define Bytes_FromStringAndSize PyBytes_FromStringAndSize
#define Bytes_FromFormat PyBytes_FromFormat
#define Bytes_AS_STRING PyBytes_AS_STRING
#define Bytes_GET_SIZE PyBytes_GET_SIZE
#define Bytes_AsDecodedObject PyBytes_AsDecodedObject

#define Object_Unicode PyObject_Str

#define IsTextObj(x) (PyUnicode_Check(x) || PyBytes_Check(x))

/* Renamed builtins */
#define BUILTINS_MODULE "builtins"
#define BUILTINS_UNICODE "str"
#define BUILTINS_UNICHR "chr"

/* Defaults for unicode file path encoding */
#define UNICODE_DEF_FS_CODEC Py_FileSystemDefaultEncoding
#if defined(MS_WIN32)
#define UNICODE_DEF_FS_ERROR "replace"
#else
#define UNICODE_DEF_FS_ERROR "surrogateescape"
#endif

#else /* #if PY_MAJOR_VERSION >= 3 */

#define PY3 0

/* Module init function returns nothing. */
#define MODINIT_RETURN(x) return
#define MODINIT_DEFINE(mod_name) PyMODINIT_FUNC init##mod_name (void)
#define DECREF_MOD(mod)

/* Type header differs. */
#define TYPE_HEAD(x,y) \
    PyObject_HEAD_INIT(x) \
    0,

/* Text interface. Use ascii strings. */
#define Text_Type PyString_Type
#define Text_Check PyString_Check
#define Text_FromLocale PyString_FromString
#define Text_FromUTF8 PyString_FromString
#define Text_FromUTF8AndSize PyString_FromStringAndSize
#define Text_FromFormat PyString_FromFormat
#define Text_GetSize PyString_GetSize
#define Text_GET_SIZE PyString_GET_SIZE

/* Binary interface. Use ascii strings. */
#define Bytes_Type PyString_Type
#define Bytes_Check PyString_Check
#define Bytes_Size PyString_Size
#define Bytes_AsString PyString_AsString
#define Bytes_AsStringAndSize PyString_AsStringAndSize
#define Bytes_FromStringAndSize PyString_FromStringAndSize
#define Bytes_FromFormat PyString_FromFormat
#define Bytes_AS_STRING PyString_AS_STRING
#define Bytes_GET_SIZE PyString_GET_SIZE
#define Bytes_AsDecodedObject PyString_AsDecodedObject

#define Object_Unicode PyObject_Unicode

/* Renamed builtins */
#define BUILTINS_MODULE "__builtin__"
#define BUILTINS_UNICODE "unicode"
#define BUILTINS_UNICHR "unichr"

/* Defaults for unicode file path encoding */
#define UNICODE_DEF_FS_CODEC Py_FileSystemDefaultEncoding
#define UNICODE_DEF_FS_ERROR "strict"

#endif /* #if PY_MAJOR_VERSION >= 3 */

#define PY2 (!PY3)

#define MODINIT_ERROR MODINIT_RETURN (NULL)

/* Module state. These macros are used to define per-module macros.
 * v - global state variable (Python 2.x)
 * s - global state structure (Python 3.x)
 */
#define PY2_GETSTATE(v) (&(v))
#define PY3_GETSTATE(s, m) ((struct s *) PyModule_GetState (m))

/* PEP 3123: Making PyObject_HEAD conform to standard C */
#if !defined(Py_TYPE)
#define Py_TYPE(o) (((PyObject *)(o))->ob_type)
#define Py_REFCNT(o) (((PyObject *)(o))->ob_refcnt)
#define Py_SIZE(o) (((PyVarObject *)(o))->ob_size)
#endif

/* Encode a unicode file path */
#define Unicode_AsEncodedPath(u) \
    PyUnicode_AsEncodedString ((u), UNICODE_DEF_FS_CODEC, UNICODE_DEF_FS_ERROR)

#define RELATIVE_MODULE(m) ("." m)

#define HAVE_OLD_BUFPROTO PY2

#if !defined(PG_ENABLE_OLDBUF) /* allow for command line override */
#if HAVE_OLD_BUFPROTO
#define PG_ENABLE_OLDBUF 1
#else
#define PG_ENABLE_OLDBUF 0
#endif
#endif

#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
#define Py_TPFLAGS_HAVE_NEWBUFFER 0
#endif

#ifndef Py_TPFLAGS_HAVE_CLASS
#define Py_TPFLAGS_HAVE_CLASS 0
#endif

#ifndef Py_TPFLAGS_CHECKTYPES
#define Py_TPFLAGS_CHECKTYPES 0
#endif

#if PY_VERSION_HEX >= 0x03020000
#define Slice_GET_INDICES_EX(slice, length, start, stop, step, slicelength) \
    PySlice_GetIndicesEx(slice, length, start, stop, step, slicelength)
#else
#define Slice_GET_INDICES_EX(slice, length, start, stop, step, slicelength) \
    PySlice_GetIndicesEx((PySliceObject *)(slice), length, \
                         start, stop, step, slicelength)
#endif

/* Support new buffer protocol? */
#if !defined(PG_ENABLE_NEWBUF) /* allow for command line override */
#if !defined(PYPY_VERSION)
#define PG_ENABLE_NEWBUF 1
#else
#define PG_ENABLE_NEWBUF 0
#endif
#endif

#endif /* #if !defined(PGCOMPAT_H) */
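
The MODINIT_* macros above exist to hide the different module-init protocols of Python 2 and 3. A sketch of the dual-version boilerplate they enable; "examplemod" and its empty method table are placeholders:

/* sketch; assumes Python.h and the header above are already included */
static PyMethodDef examplemod_methods[] = {
    {NULL, NULL, 0, NULL}
};

#if PY3
static struct PyModuleDef examplemod_def = {
    PyModuleDef_HEAD_INIT, "examplemod", NULL, -1, examplemod_methods,
    NULL, NULL, NULL, NULL
};
#endif

MODINIT_DEFINE(examplemod)   /* PyInit_examplemod or initexamplemod */
{
    PyObject *module;
#if PY3
    module = PyModule_Create(&examplemod_def);
#else
    module = Py_InitModule("examplemod", examplemod_methods);
#endif
    if (module == NULL) {
        MODINIT_ERROR;       /* returns NULL on 3.x, plain return on 2.x */
    }
    MODINIT_RETURN(module);
}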
@ -1,16 +0,0 @@
#if !defined(PGOPENGL_H)
#define PGOPENGL_H

/** This header includes definitions of OpenGL functions as pointer types for
 ** use with the SDL function SDL_GL_GetProcAddress.
 **/

#if defined(_WIN32)
#define GL_APIENTRY __stdcall
#else
#define GL_APIENTRY
#endif

typedef void (GL_APIENTRY *GL_glReadPixels_Func)(int, int, int, int, unsigned int, unsigned int, void*);

#endif
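
A sketch of the intended use: resolve glReadPixels at runtime through SDL instead of linking against an OpenGL import library. The loader function name is hypothetical; SDL_GL_GetProcAddress is the real SDL call:

#include <SDL.h>

static GL_glReadPixels_Func p_glReadPixels = NULL;

static int
load_gl_read_pixels(void)
{
    /* requires a current GL context created through SDL */
    p_glReadPixels =
        (GL_glReadPixels_Func) SDL_GL_GetProcAddress("glReadPixels");
    return p_glReadPixels != NULL;
}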
@ -1,34 +0,0 @@
/*
    pygame - Python Game Library
    Copyright (C) 2000-2001 Pete Shinners

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Library General Public
    License as published by the Free Software Foundation; either
    version 2 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    Library General Public License for more details.

    You should have received a copy of the GNU Library General Public
    License along with this library; if not, write to the Free Software
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

    Pete Shinners
    pete@shinners.org
*/

/* To allow the Pygame C API to be globally shared by all code within an
 * extension module built from multiple C files, only include the pygame.h
 * header within the top level C file, the one which calls the
 * 'import_pygame_*' macros. All other C source files of the module should
 * include _pygame.h instead.
 */
#ifndef PYGAME_H
#define PYGAME_H

#include "_pygame.h"

#endif
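
A sketch of the discipline that comment describes, for a hypothetical module built from two C files (names and the import_pygame_base() call are illustrative, not from this diff):

/* mymod.c -- the one top-level file that includes pygame.h, because it
 * defines the module init and runs the import macros:
 *
 *     #include "pygame.h"
 *     MODINIT_DEFINE(mymod)
 *     {
 *         import_pygame_base();   -- resolve the shared C API once
 *         if (PyErr_Occurred()) { MODINIT_ERROR; }
 *         ...
 *     }
 *
 * mymod_helpers.c -- every other file only needs the declarations:
 *
 *     #include "_pygame.h"
 */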
@ -1,143 +0,0 @@
/*
    pygame - Python Game Library
    Copyright (C) 2006, 2007 Rene Dudfield, Marcus von Appen

    Originally put in the public domain by Sam Lantinga.

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Library General Public
    License as published by the Free Software Foundation; either
    version 2 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    Library General Public License for more details.

    You should have received a copy of the GNU Library General Public
    License along with this library; if not, write to the Free Software
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/

/* This is unconditionally defined in Python.h */
#if defined(_POSIX_C_SOURCE)
#undef _POSIX_C_SOURCE
#endif

#include <Python.h>

/* Handle clipboard text and data in arbitrary formats */

/**
 * Predefined supported pygame scrap types.
 */
#define PYGAME_SCRAP_TEXT "text/plain"
#define PYGAME_SCRAP_BMP "image/bmp"
#define PYGAME_SCRAP_PPM "image/ppm"
#define PYGAME_SCRAP_PBM "image/pbm"

/**
 * The supported scrap clipboard types.
 *
 * This is only relevant in an X11 environment, which supports mouse
 * selections as well. For Win32 and MacOS environments the default
 * clipboard is used, no matter what value is passed.
 */
typedef enum
{
    SCRAP_CLIPBOARD,
    SCRAP_SELECTION /* only supported in X11 environments. */
} ScrapClipType;

/**
 * Macro for initialization checks.
 */
#define PYGAME_SCRAP_INIT_CHECK() \
    if(!pygame_scrap_initialized()) \
        return (PyErr_SetString (pgExc_SDLError, \
                                 "scrap system not initialized."), NULL)

/**
 * \brief Checks whether the pygame scrap module was initialized.
 *
 * \return 1 if the module was initialized, 0 otherwise.
 */
extern int
pygame_scrap_initialized (void);

/**
 * \brief Initializes the pygame scrap module internals. Call this before any
 *        other method.
 *
 * \return 1 on successful initialization, 0 otherwise.
 */
extern int
pygame_scrap_init (void);

/**
 * \brief Checks whether the pygame window lost the clipboard focus or not.
 *
 * \return 1 if the window lost the focus, 0 otherwise.
 */
extern int
pygame_scrap_lost (void);

/**
 * \brief Places content of a specific type into the clipboard.
 *
 * \note For X11 the following notes are important: The following types
 *       are reserved for internal usage and thus will throw an error on
 *       setting them: "TIMESTAMP", "TARGETS", "SDL_SELECTION".
 *       Setting PYGAME_SCRAP_TEXT ("text/plain") will also automatically
 *       set the X11 types "STRING" (XA_STRING), "TEXT" and "UTF8_STRING".
 *
 *       For Win32 the following notes are important: Setting
 *       PYGAME_SCRAP_TEXT ("text/plain") will also automatically set
 *       the Win32 type "TEXT" (CF_TEXT).
 *
 *       For QNX the following notes are important: Setting
 *       PYGAME_SCRAP_TEXT ("text/plain") will also automatically set
 *       the QNX type "TEXT" (Ph_CL_TEXT).
 *
 * \param type The type of the content.
 * \param srclen The length of the content.
 * \param src The NULL terminated content.
 * \return 1 if the content could be successfully pasted into the clipboard,
 *         0 otherwise.
 */
extern int
pygame_scrap_put (char *type, int srclen, char *src);

/**
 * \brief Gets the current content from the clipboard.
 *
 * \note The received content does not need to be the content previously
 *       placed in the clipboard using pygame_scrap_put(). See the
 *       pygame_scrap_put() notes for more details.
 *
 * \param type The type of the content to receive.
 * \param count The size of the returned content.
 * \return The content or NULL in case of an error or if no content of the
 *         specified type was available.
 */
extern char*
pygame_scrap_get (char *type, unsigned long *count);

/**
 * \brief Gets the currently available content types from the clipboard.
 *
 * \return The different available content types or NULL in case of an
 *         error or if no content type is available.
 */
extern char**
pygame_scrap_get_types (void);

/**
 * \brief Checks whether content for the specified scrap type is currently
 *        available in the clipboard.
 *
 * \param type The type to check for.
 * \return 1 if there is content, 0 otherwise.
 */
extern int
pygame_scrap_contains (char *type);
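
A sketch of the call order the declarations imply: initialize, put a plain-text value, then query and read it back. Whether srclen includes the terminating NUL is not stated in the header; this sketch assumes it does:

#include <string.h>

static int
copy_and_check(const char *msg)
{
    char *data;
    unsigned long len = 0;

    if (!pygame_scrap_initialized() && !pygame_scrap_init())
        return 0;
    /* srclen assumed here to count the terminating NUL */
    if (!pygame_scrap_put(PYGAME_SCRAP_TEXT, (int)strlen(msg) + 1, (char *)msg))
        return 0;
    if (!pygame_scrap_contains(PYGAME_SCRAP_TEXT))
        return 0;
    data = pygame_scrap_get(PYGAME_SCRAP_TEXT, &len);
    return data != NULL && len > 0;
}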
@ -1,383 +0,0 @@
/*
    pygame - Python Game Library
    Copyright (C) 2000-2001 Pete Shinners
    Copyright (C) 2007 Marcus von Appen

    This library is free software; you can redistribute it and/or
    modify it under the terms of the GNU Library General Public
    License as published by the Free Software Foundation; either
    version 2 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
    Library General Public License for more details.

    You should have received a copy of the GNU Library General Public
    License along with this library; if not, write to the Free Software
    Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

    Pete Shinners
    pete@shinners.org
*/

#ifndef SURFACE_H
#define SURFACE_H

/* This is defined in SDL.h */
#if defined(_POSIX_C_SOURCE)
#undef _POSIX_C_SOURCE
#endif

#include <SDL.h>
#include "pygame.h"

/* Blend modes */
#define PYGAME_BLEND_ADD  0x1
#define PYGAME_BLEND_SUB  0x2
#define PYGAME_BLEND_MULT 0x3
#define PYGAME_BLEND_MIN  0x4
#define PYGAME_BLEND_MAX  0x5

#define PYGAME_BLEND_RGB_ADD  0x1
#define PYGAME_BLEND_RGB_SUB  0x2
#define PYGAME_BLEND_RGB_MULT 0x3
#define PYGAME_BLEND_RGB_MIN  0x4
#define PYGAME_BLEND_RGB_MAX  0x5

#define PYGAME_BLEND_RGBA_ADD  0x6
#define PYGAME_BLEND_RGBA_SUB  0x7
#define PYGAME_BLEND_RGBA_MULT 0x8
#define PYGAME_BLEND_RGBA_MIN  0x9
#define PYGAME_BLEND_RGBA_MAX  0x10
#define PYGAME_BLEND_PREMULTIPLIED 0x11

#if SDL_BYTEORDER == SDL_LIL_ENDIAN
#define GET_PIXEL_24(b) (b[0] + (b[1] << 8) + (b[2] << 16))
#else
#define GET_PIXEL_24(b) (b[2] + (b[1] << 8) + (b[0] << 16))
#endif

#define GET_PIXEL(pxl, bpp, source)       \
    switch (bpp)                          \
    {                                     \
    case 2:                               \
        pxl = *((Uint16 *) (source));     \
        break;                            \
    case 4:                               \
        pxl = *((Uint32 *) (source));     \
        break;                            \
    default:                              \
    {                                     \
        Uint8 *b = (Uint8 *) source;      \
        pxl = GET_PIXEL_24(b);            \
    }                                     \
    break;                                \
    }

#if IS_SDLv1
#define GET_PIXELVALS(_sR, _sG, _sB, _sA, px, fmt, ppa)               \
    _sR = ((px & fmt->Rmask) >> fmt->Rshift);                         \
    _sR = (_sR << fmt->Rloss) + (_sR >> (8 - (fmt->Rloss << 1)));     \
    _sG = ((px & fmt->Gmask) >> fmt->Gshift);                         \
    _sG = (_sG << fmt->Gloss) + (_sG >> (8 - (fmt->Gloss << 1)));     \
    _sB = ((px & fmt->Bmask) >> fmt->Bshift);                         \
    _sB = (_sB << fmt->Bloss) + (_sB >> (8 - (fmt->Bloss << 1)));     \
    if (ppa)                                                          \
    {                                                                 \
        _sA = ((px & fmt->Amask) >> fmt->Ashift);                     \
        _sA = (_sA << fmt->Aloss) + (_sA >> (8 - (fmt->Aloss << 1))); \
    }                                                                 \
    else                                                              \
    {                                                                 \
        _sA = 255;                                                    \
    }

#define GET_PIXELVALS_1(sr, sg, sb, sa, _src, _fmt)      \
    sr = _fmt->palette->colors[*((Uint8 *) (_src))].r;   \
    sg = _fmt->palette->colors[*((Uint8 *) (_src))].g;   \
    sb = _fmt->palette->colors[*((Uint8 *) (_src))].b;   \
    sa = 255;

/* For 1 byte palette pixels */
#define SET_PIXELVAL(px, fmt, _dR, _dG, _dB, _dA) \
    *(px) = (Uint8) SDL_MapRGB(fmt, _dR, _dG, _dB)
#else /* IS_SDLv2 */
#define GET_PIXELVALS(_sR, _sG, _sB, _sA, px, fmt, ppa)   \
    SDL_GetRGBA(px, fmt, &(_sR), &(_sG), &(_sB), &(_sA)); \
    if (!ppa) {                                           \
        _sA = 255;                                        \
    }

#define GET_PIXELVALS_1(sr, sg, sb, sa, _src, _fmt)      \
    sr = _fmt->palette->colors[*((Uint8 *) (_src))].r;   \
    sg = _fmt->palette->colors[*((Uint8 *) (_src))].g;   \
    sb = _fmt->palette->colors[*((Uint8 *) (_src))].b;   \
    sa = 255;

/* For 1 byte palette pixels */
#define SET_PIXELVAL(px, fmt, _dR, _dG, _dB, _dA) \
    *(px) = (Uint8) SDL_MapRGBA(fmt, _dR, _dG, _dB, _dA)
#endif /* IS_SDLv2 */

#if SDL_BYTEORDER == SDL_LIL_ENDIAN
#define SET_OFFSETS_24(or, og, ob, fmt)   \
    {                                     \
        or = (fmt->Rshift == 0 ? 0 :      \
              fmt->Rshift == 8 ? 1 :      \
                                 2 );     \
        og = (fmt->Gshift == 0 ? 0 :      \
              fmt->Gshift == 8 ? 1 :      \
                                 2 );     \
        ob = (fmt->Bshift == 0 ? 0 :      \
              fmt->Bshift == 8 ? 1 :      \
                                 2 );     \
    }

#define SET_OFFSETS_32(or, og, ob, fmt)   \
    {                                     \
        or = (fmt->Rshift == 0  ? 0 :     \
              fmt->Rshift == 8  ? 1 :     \
              fmt->Rshift == 16 ? 2 :     \
                                  3 );    \
        og = (fmt->Gshift == 0  ? 0 :     \
              fmt->Gshift == 8  ? 1 :     \
              fmt->Gshift == 16 ? 2 :     \
                                  3 );    \
        ob = (fmt->Bshift == 0  ? 0 :     \
              fmt->Bshift == 8  ? 1 :     \
              fmt->Bshift == 16 ? 2 :     \
                                  3 );    \
    }
#else
#define SET_OFFSETS_24(or, og, ob, fmt)   \
    {                                     \
        or = (fmt->Rshift == 0 ? 2 :      \
              fmt->Rshift == 8 ? 1 :      \
                                 0 );     \
        og = (fmt->Gshift == 0 ? 2 :      \
              fmt->Gshift == 8 ? 1 :      \
                                 0 );     \
        ob = (fmt->Bshift == 0 ? 2 :      \
              fmt->Bshift == 8 ? 1 :      \
                                 0 );     \
    }

#define SET_OFFSETS_32(or, og, ob, fmt)   \
    {                                     \
        or = (fmt->Rshift == 0  ? 3 :     \
              fmt->Rshift == 8  ? 2 :     \
              fmt->Rshift == 16 ? 1 :     \
                                  0 );    \
        og = (fmt->Gshift == 0  ? 3 :     \
              fmt->Gshift == 8  ? 2 :     \
              fmt->Gshift == 16 ? 1 :     \
                                  0 );    \
        ob = (fmt->Bshift == 0  ? 3 :     \
              fmt->Bshift == 8  ? 2 :     \
              fmt->Bshift == 16 ? 1 :     \
                                  0 );    \
    }
#endif

#define CREATE_PIXEL(buf, r, g, b, a, bp, ft)   \
    switch (bp)                                 \
    {                                           \
    case 2:                                     \
        *((Uint16 *) (buf)) =                   \
            ((r >> ft->Rloss) << ft->Rshift) |  \
            ((g >> ft->Gloss) << ft->Gshift) |  \
            ((b >> ft->Bloss) << ft->Bshift) |  \
            ((a >> ft->Aloss) << ft->Ashift);   \
        break;                                  \
    case 4:                                     \
        *((Uint32 *) (buf)) =                   \
            ((r >> ft->Rloss) << ft->Rshift) |  \
            ((g >> ft->Gloss) << ft->Gshift) |  \
            ((b >> ft->Bloss) << ft->Bshift) |  \
            ((a >> ft->Aloss) << ft->Ashift);   \
        break;                                  \
    }

/* Pretty good idea from Tom Duff :-). */
#define LOOP_UNROLLED4(code, n, width) \
    n = (width + 3) / 4;               \
    switch (width & 3)                 \
    {                                  \
    case 0: do { code;                 \
    case 3:      code;                 \
    case 2:      code;                 \
    case 1:      code;                 \
            } while (--n > 0);         \
    }
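/* Illustration (not part of the original header): LOOP_UNROLLED4 is Duff's
 * device. The statement passed as `code` executes exactly `width` times,
 * four copies per pass through the loop, with `n` as caller-supplied
 * scratch; note the construct assumes width >= 1.
 */
#if 0
static void
fill_row_example(Uint8 *dst, Uint8 value, int width)
{
    int n;
    LOOP_UNROLLED4(*dst++ = value;, n, width);
}
#endif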

/* Used in the srcbpp == dstbpp == 1 blend functions */
#define REPEAT_3(code) \
    code;              \
    code;              \
    code;

#define REPEAT_4(code) \
    code;              \
    code;              \
    code;              \
    code;

#define BLEND_ADD(tmp, sR, sG, sB, sA, dR, dG, dB, dA) \
    tmp = dR + sR; dR = (tmp <= 255 ? tmp : 255);      \
    tmp = dG + sG; dG = (tmp <= 255 ? tmp : 255);      \
    tmp = dB + sB; dB = (tmp <= 255 ? tmp : 255);

#define BLEND_SUB(tmp, sR, sG, sB, sA, dR, dG, dB, dA) \
    tmp = dR - sR; dR = (tmp >= 0 ? tmp : 0);          \
    tmp = dG - sG; dG = (tmp >= 0 ? tmp : 0);          \
    tmp = dB - sB; dB = (tmp >= 0 ? tmp : 0);

#define BLEND_MULT(sR, sG, sB, sA, dR, dG, dB, dA) \
    dR = (dR && sR) ? (dR * sR) >> 8 : 0;          \
    dG = (dG && sG) ? (dG * sG) >> 8 : 0;          \
    dB = (dB && sB) ? (dB * sB) >> 8 : 0;

#define BLEND_MIN(sR, sG, sB, sA, dR, dG, dB, dA) \
    if(sR < dR) { dR = sR; }                      \
    if(sG < dG) { dG = sG; }                      \
    if(sB < dB) { dB = sB; }

#define BLEND_MAX(sR, sG, sB, sA, dR, dG, dB, dA) \
    if(sR > dR) { dR = sR; }                      \
    if(sG > dG) { dG = sG; }                      \
    if(sB > dB) { dB = sB; }

#define BLEND_RGBA_ADD(tmp, sR, sG, sB, sA, dR, dG, dB, dA) \
    tmp = dR + sR; dR = (tmp <= 255 ? tmp : 255);           \
    tmp = dG + sG; dG = (tmp <= 255 ? tmp : 255);           \
    tmp = dB + sB; dB = (tmp <= 255 ? tmp : 255);           \
    tmp = dA + sA; dA = (tmp <= 255 ? tmp : 255);

#define BLEND_RGBA_SUB(tmp, sR, sG, sB, sA, dR, dG, dB, dA) \
    tmp = dR - sR; dR = (tmp >= 0 ? tmp : 0);               \
    tmp = dG - sG; dG = (tmp >= 0 ? tmp : 0);               \
    tmp = dB - sB; dB = (tmp >= 0 ? tmp : 0);               \
    tmp = dA - sA; dA = (tmp >= 0 ? tmp : 0);

#define BLEND_RGBA_MULT(sR, sG, sB, sA, dR, dG, dB, dA) \
    dR = (dR && sR) ? (dR * sR) >> 8 : 0;               \
    dG = (dG && sG) ? (dG * sG) >> 8 : 0;               \
    dB = (dB && sB) ? (dB * sB) >> 8 : 0;               \
    dA = (dA && sA) ? (dA * sA) >> 8 : 0;

#define BLEND_RGBA_MIN(sR, sG, sB, sA, dR, dG, dB, dA) \
    if(sR < dR) { dR = sR; }                           \
    if(sG < dG) { dG = sG; }                           \
    if(sB < dB) { dB = sB; }                           \
    if(sA < dA) { dA = sA; }

#define BLEND_RGBA_MAX(sR, sG, sB, sA, dR, dG, dB, dA) \
    if(sR > dR) { dR = sR; }                           \
    if(sG > dG) { dG = sG; }                           \
    if(sB > dB) { dB = sB; }                           \
    if(sA > dA) { dA = sA; }

#if 1
/* Choose an alpha blend equation. If the sign is preserved on a right shift
 * then use a specialized, faster, equation. Otherwise a more general form,
 * where all additions are done before the shift, is needed.
 */
#if (-1 >> 1) < 0
#define ALPHA_BLEND_COMP(sC, dC, sA) ((((sC - dC) * sA + sC) >> 8) + dC)
#else
#define ALPHA_BLEND_COMP(sC, dC, sA) (((dC << 8) + (sC - dC) * sA + sC) >> 8)
#endif
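/* Illustration (not part of the original header): the (-1 >> 1) < 0 test
 * above probes for an arithmetic, sign-preserving right shift. On such
 * platforms the two candidate equations agree exactly, because adding
 * dC << 8 before the shift is the same as adding dC after it; the guarded
 * throwaway program below can confirm that on a given compiler.
 */
#if 0
#include <stdio.h>

static int comp_fast(int sC, int dC, int sA)
{ return (((sC - dC) * sA + sC) >> 8) + dC; }

static int comp_general(int sC, int dC, int sA)
{ return ((dC << 8) + (sC - dC) * sA + sC) >> 8; }

int main(void)
{
    int sC, dC, sA, mismatches = 0;
    for (sC = 0; sC < 256; ++sC)
        for (dC = 0; dC < 256; ++dC)
            for (sA = 0; sA < 256; ++sA)
                if (comp_fast(sC, dC, sA) != comp_general(sC, dC, sA))
                    ++mismatches;
    printf("mismatches: %d\n", mismatches); /* expect 0 */
    return 0;
}
#endif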

#define ALPHA_BLEND(sR, sG, sB, sA, dR, dG, dB, dA) \
    do {                                            \
        if (dA)                                     \
        {                                           \
            dR = ALPHA_BLEND_COMP(sR, dR, sA);      \
            dG = ALPHA_BLEND_COMP(sG, dG, sA);      \
            dB = ALPHA_BLEND_COMP(sB, dB, sA);      \
            dA = sA + dA - ((sA * dA) / 255);       \
        }                                           \
        else                                        \
        {                                           \
            dR = sR;                                \
            dG = sG;                                \
            dB = sB;                                \
            dA = sA;                                \
        }                                           \
    } while(0)

#define ALPHA_BLEND_PREMULTIPLIED_COMP(sC, dC, sA) (sC + dC - ((dC * sA) >> 8))

#define ALPHA_BLEND_PREMULTIPLIED(tmp, sR, sG, sB, sA, dR, dG, dB, dA)                  \
    do {                                                                                \
        tmp = ALPHA_BLEND_PREMULTIPLIED_COMP(sR, dR, sA); dR = (tmp > 255 ? 255 : tmp); \
        tmp = ALPHA_BLEND_PREMULTIPLIED_COMP(sG, dG, sA); dG = (tmp > 255 ? 255 : tmp); \
        tmp = ALPHA_BLEND_PREMULTIPLIED_COMP(sB, dB, sA); dB = (tmp > 255 ? 255 : tmp); \
        dA = sA + dA - ((sA * dA) / 255);                                               \
    } while(0)
#elif 0

#define ALPHA_BLEND(sR, sG, sB, sA, dR, dG, dB, dA) \
    do {                                            \
        if(sA){                                     \
            if(dA && sA < 255){                     \
                int dContrib = dA*(255 - sA)/255;   \
                dA = sA+dA - ((sA*dA)/255);         \
                dR = (dR*dContrib + sR*sA)/dA;      \
                dG = (dG*dContrib + sG*sA)/dA;      \
                dB = (dB*dContrib + sB*sA)/dA;      \
            }else{                                  \
                dR = sR;                            \
                dG = sG;                            \
                dB = sB;                            \
                dA = sA;                            \
            }                                       \
        }                                           \
    } while(0)
#endif

int
surface_fill_blend (SDL_Surface *surface, SDL_Rect *rect, Uint32 color,
                    int blendargs);

void
surface_respect_clip_rect (SDL_Surface *surface, SDL_Rect *rect);

int
pygame_AlphaBlit (SDL_Surface * src, SDL_Rect * srcrect,
                  SDL_Surface * dst, SDL_Rect * dstrect, int the_args);

int
pygame_Blit (SDL_Surface * src, SDL_Rect * srcrect,
             SDL_Surface * dst, SDL_Rect * dstrect, int the_args);

#endif /* SURFACE_H */
@ -1,2 +0,0 @@
./setuptools-40.8.0-py3.7.egg
./pip-19.0.3-py3.7.egg
@ -1,73 +0,0 @@
Metadata-Version: 1.2
Name: pip
Version: 19.0.3
Summary: The PyPA recommended tool for installing Python packages.
Home-page: https://pip.pypa.io/
Author: The pip developers
Author-email: pypa-dev@groups.google.com
License: MIT
Description: pip - The Python Package Installer
        ==================================

        .. image:: https://img.shields.io/pypi/v/pip.svg
           :target: https://pypi.org/project/pip/

        .. image:: https://readthedocs.org/projects/pip/badge/?version=latest
           :target: https://pip.pypa.io/en/latest

        pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.

        Please take a look at our documentation for how to install and use pip:

        * `Installation`_
        * `Usage`_
        * `Release notes`_

        If you find bugs, need help, or want to talk to the developers please use our mailing lists or chat rooms:

        * `Issue tracking`_
        * `Discourse channel`_
        * `User IRC`_

        If you want to get involved head over to GitHub to get the source code and feel free to jump on the developer mailing lists and chat rooms:

        * `GitHub page`_
        * `Dev mailing list`_
        * `Dev IRC`_

        Code of Conduct
        ---------------

        Everyone interacting in the pip project's codebases, issue trackers, chat
        rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_.

        .. _package installer: https://packaging.python.org/en/latest/current/
        .. _Python Package Index: https://pypi.org
        .. _Installation: https://pip.pypa.io/en/stable/installing.html
        .. _Usage: https://pip.pypa.io/en/stable/
        .. _Release notes: https://pip.pypa.io/en/stable/news.html
        .. _GitHub page: https://github.com/pypa/pip
        .. _Issue tracking: https://github.com/pypa/pip/issues
        .. _Discourse channel: https://discuss.python.org/c/packaging
        .. _Dev mailing list: https://groups.google.com/forum/#!forum/pypa-dev
        .. _User IRC: https://webchat.freenode.net/?channels=%23pypa
        .. _Dev IRC: https://webchat.freenode.net/?channels=%23pypa-dev
        .. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/
Keywords: distutils easy_install egg setuptools wheel virtualenv
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Software Development :: Build Tools
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.4
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*
@@ -1,391 +0,0 @@
AUTHORS.txt
LICENSE.txt
MANIFEST.in
NEWS.rst
README.rst
pyproject.toml
setup.cfg
setup.py
docs/pip_sphinxext.py
docs/html/conf.py
docs/html/cookbook.rst
docs/html/index.rst
docs/html/installing.rst
docs/html/logic.rst
docs/html/news.rst
docs/html/quickstart.rst
docs/html/usage.rst
docs/html/user_guide.rst
docs/html/development/configuration.rst
docs/html/development/contributing.rst
docs/html/development/getting-started.rst
docs/html/development/index.rst
docs/html/development/release-process.rst
docs/html/development/vendoring-policy.rst
docs/html/reference/index.rst
docs/html/reference/pip.rst
docs/html/reference/pip_check.rst
docs/html/reference/pip_config.rst
docs/html/reference/pip_download.rst
docs/html/reference/pip_freeze.rst
docs/html/reference/pip_hash.rst
docs/html/reference/pip_install.rst
docs/html/reference/pip_list.rst
docs/html/reference/pip_search.rst
docs/html/reference/pip_show.rst
docs/html/reference/pip_uninstall.rst
docs/html/reference/pip_wheel.rst
docs/man/index.rst
docs/man/commands/check.rst
docs/man/commands/config.rst
docs/man/commands/download.rst
docs/man/commands/freeze.rst
docs/man/commands/hash.rst
docs/man/commands/help.rst
docs/man/commands/install.rst
docs/man/commands/list.rst
docs/man/commands/search.rst
docs/man/commands/show.rst
docs/man/commands/uninstall.rst
docs/man/commands/wheel.rst
src/pip/__init__.py
src/pip/__main__.py
src/pip.egg-info/PKG-INFO
src/pip.egg-info/SOURCES.txt
src/pip.egg-info/dependency_links.txt
src/pip.egg-info/entry_points.txt
src/pip.egg-info/not-zip-safe
src/pip.egg-info/top_level.txt
src/pip/_internal/__init__.py
src/pip/_internal/build_env.py
src/pip/_internal/cache.py
src/pip/_internal/configuration.py
src/pip/_internal/download.py
src/pip/_internal/exceptions.py
src/pip/_internal/index.py
src/pip/_internal/locations.py
src/pip/_internal/pep425tags.py
src/pip/_internal/pyproject.py
src/pip/_internal/resolve.py
src/pip/_internal/wheel.py
src/pip/_internal/cli/__init__.py
src/pip/_internal/cli/autocompletion.py
src/pip/_internal/cli/base_command.py
src/pip/_internal/cli/cmdoptions.py
src/pip/_internal/cli/main_parser.py
src/pip/_internal/cli/parser.py
src/pip/_internal/cli/status_codes.py
src/pip/_internal/commands/__init__.py
src/pip/_internal/commands/check.py
src/pip/_internal/commands/completion.py
src/pip/_internal/commands/configuration.py
src/pip/_internal/commands/download.py
src/pip/_internal/commands/freeze.py
src/pip/_internal/commands/hash.py
src/pip/_internal/commands/help.py
src/pip/_internal/commands/install.py
src/pip/_internal/commands/list.py
src/pip/_internal/commands/search.py
src/pip/_internal/commands/show.py
src/pip/_internal/commands/uninstall.py
src/pip/_internal/commands/wheel.py
src/pip/_internal/models/__init__.py
src/pip/_internal/models/candidate.py
src/pip/_internal/models/format_control.py
src/pip/_internal/models/index.py
src/pip/_internal/models/link.py
src/pip/_internal/operations/__init__.py
src/pip/_internal/operations/check.py
src/pip/_internal/operations/freeze.py
src/pip/_internal/operations/prepare.py
src/pip/_internal/req/__init__.py
src/pip/_internal/req/constructors.py
src/pip/_internal/req/req_file.py
src/pip/_internal/req/req_install.py
src/pip/_internal/req/req_set.py
src/pip/_internal/req/req_tracker.py
src/pip/_internal/req/req_uninstall.py
src/pip/_internal/utils/__init__.py
src/pip/_internal/utils/appdirs.py
src/pip/_internal/utils/compat.py
src/pip/_internal/utils/deprecation.py
src/pip/_internal/utils/encoding.py
src/pip/_internal/utils/filesystem.py
src/pip/_internal/utils/glibc.py
src/pip/_internal/utils/hashes.py
src/pip/_internal/utils/logging.py
src/pip/_internal/utils/misc.py
src/pip/_internal/utils/models.py
src/pip/_internal/utils/outdated.py
src/pip/_internal/utils/packaging.py
src/pip/_internal/utils/setuptools_build.py
src/pip/_internal/utils/temp_dir.py
src/pip/_internal/utils/typing.py
src/pip/_internal/utils/ui.py
src/pip/_internal/vcs/__init__.py
src/pip/_internal/vcs/bazaar.py
src/pip/_internal/vcs/git.py
src/pip/_internal/vcs/mercurial.py
src/pip/_internal/vcs/subversion.py
src/pip/_vendor/README.rst
src/pip/_vendor/__init__.py
src/pip/_vendor/appdirs.LICENSE.txt
src/pip/_vendor/appdirs.py
src/pip/_vendor/distro.LICENSE
src/pip/_vendor/distro.py
src/pip/_vendor/ipaddress.LICENSE
src/pip/_vendor/ipaddress.py
src/pip/_vendor/pyparsing.LICENSE
src/pip/_vendor/pyparsing.py
src/pip/_vendor/retrying.LICENSE
src/pip/_vendor/retrying.py
src/pip/_vendor/six.LICENSE
src/pip/_vendor/six.py
src/pip/_vendor/vendor.txt
src/pip/_vendor/cachecontrol/LICENSE.txt
src/pip/_vendor/cachecontrol/__init__.py
src/pip/_vendor/cachecontrol/_cmd.py
src/pip/_vendor/cachecontrol/adapter.py
src/pip/_vendor/cachecontrol/cache.py
src/pip/_vendor/cachecontrol/compat.py
src/pip/_vendor/cachecontrol/controller.py
src/pip/_vendor/cachecontrol/filewrapper.py
src/pip/_vendor/cachecontrol/heuristics.py
src/pip/_vendor/cachecontrol/serialize.py
src/pip/_vendor/cachecontrol/wrapper.py
src/pip/_vendor/cachecontrol/caches/__init__.py
src/pip/_vendor/cachecontrol/caches/file_cache.py
src/pip/_vendor/cachecontrol/caches/redis_cache.py
src/pip/_vendor/certifi/LICENSE
src/pip/_vendor/certifi/__init__.py
src/pip/_vendor/certifi/__main__.py
src/pip/_vendor/certifi/cacert.pem
src/pip/_vendor/certifi/core.py
src/pip/_vendor/chardet/LICENSE
src/pip/_vendor/chardet/__init__.py
src/pip/_vendor/chardet/big5freq.py
src/pip/_vendor/chardet/big5prober.py
src/pip/_vendor/chardet/chardistribution.py
src/pip/_vendor/chardet/charsetgroupprober.py
src/pip/_vendor/chardet/charsetprober.py
src/pip/_vendor/chardet/codingstatemachine.py
src/pip/_vendor/chardet/compat.py
src/pip/_vendor/chardet/cp949prober.py
src/pip/_vendor/chardet/enums.py
src/pip/_vendor/chardet/escprober.py
src/pip/_vendor/chardet/escsm.py
src/pip/_vendor/chardet/eucjpprober.py
src/pip/_vendor/chardet/euckrfreq.py
src/pip/_vendor/chardet/euckrprober.py
src/pip/_vendor/chardet/euctwfreq.py
src/pip/_vendor/chardet/euctwprober.py
src/pip/_vendor/chardet/gb2312freq.py
src/pip/_vendor/chardet/gb2312prober.py
src/pip/_vendor/chardet/hebrewprober.py
src/pip/_vendor/chardet/jisfreq.py
src/pip/_vendor/chardet/jpcntx.py
src/pip/_vendor/chardet/langbulgarianmodel.py
src/pip/_vendor/chardet/langcyrillicmodel.py
src/pip/_vendor/chardet/langgreekmodel.py
src/pip/_vendor/chardet/langhebrewmodel.py
src/pip/_vendor/chardet/langhungarianmodel.py
src/pip/_vendor/chardet/langthaimodel.py
src/pip/_vendor/chardet/langturkishmodel.py
src/pip/_vendor/chardet/latin1prober.py
src/pip/_vendor/chardet/mbcharsetprober.py
src/pip/_vendor/chardet/mbcsgroupprober.py
src/pip/_vendor/chardet/mbcssm.py
src/pip/_vendor/chardet/sbcharsetprober.py
src/pip/_vendor/chardet/sbcsgroupprober.py
src/pip/_vendor/chardet/sjisprober.py
src/pip/_vendor/chardet/universaldetector.py
src/pip/_vendor/chardet/utf8prober.py
src/pip/_vendor/chardet/version.py
src/pip/_vendor/chardet/cli/__init__.py
src/pip/_vendor/chardet/cli/chardetect.py
src/pip/_vendor/colorama/LICENSE.txt
src/pip/_vendor/colorama/__init__.py
src/pip/_vendor/colorama/ansi.py
src/pip/_vendor/colorama/ansitowin32.py
src/pip/_vendor/colorama/initialise.py
src/pip/_vendor/colorama/win32.py
src/pip/_vendor/colorama/winterm.py
src/pip/_vendor/distlib/LICENSE.txt
src/pip/_vendor/distlib/__init__.py
src/pip/_vendor/distlib/compat.py
src/pip/_vendor/distlib/database.py
src/pip/_vendor/distlib/index.py
src/pip/_vendor/distlib/locators.py
src/pip/_vendor/distlib/manifest.py
src/pip/_vendor/distlib/markers.py
src/pip/_vendor/distlib/metadata.py
src/pip/_vendor/distlib/resources.py
src/pip/_vendor/distlib/scripts.py
src/pip/_vendor/distlib/t32.exe
src/pip/_vendor/distlib/t64.exe
src/pip/_vendor/distlib/util.py
src/pip/_vendor/distlib/version.py
src/pip/_vendor/distlib/w32.exe
src/pip/_vendor/distlib/w64.exe
src/pip/_vendor/distlib/wheel.py
src/pip/_vendor/distlib/_backport/__init__.py
src/pip/_vendor/distlib/_backport/misc.py
src/pip/_vendor/distlib/_backport/shutil.py
src/pip/_vendor/distlib/_backport/sysconfig.cfg
src/pip/_vendor/distlib/_backport/sysconfig.py
src/pip/_vendor/distlib/_backport/tarfile.py
src/pip/_vendor/html5lib/LICENSE
src/pip/_vendor/html5lib/__init__.py
src/pip/_vendor/html5lib/_ihatexml.py
src/pip/_vendor/html5lib/_inputstream.py
src/pip/_vendor/html5lib/_tokenizer.py
src/pip/_vendor/html5lib/_utils.py
src/pip/_vendor/html5lib/constants.py
src/pip/_vendor/html5lib/html5parser.py
src/pip/_vendor/html5lib/serializer.py
src/pip/_vendor/html5lib/_trie/__init__.py
src/pip/_vendor/html5lib/_trie/_base.py
src/pip/_vendor/html5lib/_trie/datrie.py
src/pip/_vendor/html5lib/_trie/py.py
src/pip/_vendor/html5lib/filters/__init__.py
src/pip/_vendor/html5lib/filters/alphabeticalattributes.py
src/pip/_vendor/html5lib/filters/base.py
src/pip/_vendor/html5lib/filters/inject_meta_charset.py
src/pip/_vendor/html5lib/filters/lint.py
src/pip/_vendor/html5lib/filters/optionaltags.py
src/pip/_vendor/html5lib/filters/sanitizer.py
src/pip/_vendor/html5lib/filters/whitespace.py
src/pip/_vendor/html5lib/treeadapters/__init__.py
src/pip/_vendor/html5lib/treeadapters/genshi.py
src/pip/_vendor/html5lib/treeadapters/sax.py
src/pip/_vendor/html5lib/treebuilders/__init__.py
src/pip/_vendor/html5lib/treebuilders/base.py
src/pip/_vendor/html5lib/treebuilders/dom.py
src/pip/_vendor/html5lib/treebuilders/etree.py
src/pip/_vendor/html5lib/treebuilders/etree_lxml.py
src/pip/_vendor/html5lib/treewalkers/__init__.py
src/pip/_vendor/html5lib/treewalkers/base.py
src/pip/_vendor/html5lib/treewalkers/dom.py
src/pip/_vendor/html5lib/treewalkers/etree.py
src/pip/_vendor/html5lib/treewalkers/etree_lxml.py
src/pip/_vendor/html5lib/treewalkers/genshi.py
src/pip/_vendor/idna/LICENSE.rst
src/pip/_vendor/idna/__init__.py
src/pip/_vendor/idna/codec.py
src/pip/_vendor/idna/compat.py
src/pip/_vendor/idna/core.py
src/pip/_vendor/idna/idnadata.py
src/pip/_vendor/idna/intranges.py
src/pip/_vendor/idna/package_data.py
src/pip/_vendor/idna/uts46data.py
src/pip/_vendor/lockfile/LICENSE
src/pip/_vendor/lockfile/__init__.py
src/pip/_vendor/lockfile/linklockfile.py
src/pip/_vendor/lockfile/mkdirlockfile.py
src/pip/_vendor/lockfile/pidlockfile.py
src/pip/_vendor/lockfile/sqlitelockfile.py
src/pip/_vendor/lockfile/symlinklockfile.py
src/pip/_vendor/msgpack/COPYING
src/pip/_vendor/msgpack/__init__.py
src/pip/_vendor/msgpack/_version.py
src/pip/_vendor/msgpack/exceptions.py
src/pip/_vendor/msgpack/fallback.py
src/pip/_vendor/packaging/LICENSE
src/pip/_vendor/packaging/LICENSE.APACHE
src/pip/_vendor/packaging/LICENSE.BSD
src/pip/_vendor/packaging/__about__.py
src/pip/_vendor/packaging/__init__.py
src/pip/_vendor/packaging/_compat.py
src/pip/_vendor/packaging/_structures.py
src/pip/_vendor/packaging/markers.py
src/pip/_vendor/packaging/requirements.py
src/pip/_vendor/packaging/specifiers.py
src/pip/_vendor/packaging/utils.py
src/pip/_vendor/packaging/version.py
src/pip/_vendor/pep517/LICENSE
src/pip/_vendor/pep517/__init__.py
src/pip/_vendor/pep517/_in_process.py
src/pip/_vendor/pep517/build.py
src/pip/_vendor/pep517/check.py
src/pip/_vendor/pep517/colorlog.py
src/pip/_vendor/pep517/compat.py
src/pip/_vendor/pep517/envbuild.py
src/pip/_vendor/pep517/wrappers.py
src/pip/_vendor/pkg_resources/LICENSE
src/pip/_vendor/pkg_resources/__init__.py
src/pip/_vendor/pkg_resources/py31compat.py
src/pip/_vendor/progress/LICENSE
src/pip/_vendor/progress/__init__.py
src/pip/_vendor/progress/bar.py
src/pip/_vendor/progress/counter.py
src/pip/_vendor/progress/helpers.py
src/pip/_vendor/progress/spinner.py
src/pip/_vendor/pytoml/LICENSE
src/pip/_vendor/pytoml/__init__.py
src/pip/_vendor/pytoml/core.py
src/pip/_vendor/pytoml/parser.py
src/pip/_vendor/pytoml/test.py
src/pip/_vendor/pytoml/utils.py
src/pip/_vendor/pytoml/writer.py
src/pip/_vendor/requests/LICENSE
src/pip/_vendor/requests/__init__.py
src/pip/_vendor/requests/__version__.py
src/pip/_vendor/requests/_internal_utils.py
src/pip/_vendor/requests/adapters.py
src/pip/_vendor/requests/api.py
src/pip/_vendor/requests/auth.py
src/pip/_vendor/requests/certs.py
src/pip/_vendor/requests/compat.py
src/pip/_vendor/requests/cookies.py
src/pip/_vendor/requests/exceptions.py
src/pip/_vendor/requests/help.py
src/pip/_vendor/requests/hooks.py
src/pip/_vendor/requests/models.py
src/pip/_vendor/requests/packages.py
src/pip/_vendor/requests/sessions.py
src/pip/_vendor/requests/status_codes.py
src/pip/_vendor/requests/structures.py
src/pip/_vendor/requests/utils.py
src/pip/_vendor/urllib3/LICENSE.txt
src/pip/_vendor/urllib3/__init__.py
src/pip/_vendor/urllib3/_collections.py
src/pip/_vendor/urllib3/connection.py
src/pip/_vendor/urllib3/connectionpool.py
src/pip/_vendor/urllib3/exceptions.py
src/pip/_vendor/urllib3/fields.py
src/pip/_vendor/urllib3/filepost.py
src/pip/_vendor/urllib3/poolmanager.py
src/pip/_vendor/urllib3/request.py
src/pip/_vendor/urllib3/response.py
src/pip/_vendor/urllib3/contrib/__init__.py
src/pip/_vendor/urllib3/contrib/_appengine_environ.py
src/pip/_vendor/urllib3/contrib/appengine.py
src/pip/_vendor/urllib3/contrib/ntlmpool.py
src/pip/_vendor/urllib3/contrib/pyopenssl.py
src/pip/_vendor/urllib3/contrib/securetransport.py
src/pip/_vendor/urllib3/contrib/socks.py
src/pip/_vendor/urllib3/contrib/_securetransport/__init__.py
src/pip/_vendor/urllib3/contrib/_securetransport/bindings.py
src/pip/_vendor/urllib3/contrib/_securetransport/low_level.py
src/pip/_vendor/urllib3/packages/__init__.py
src/pip/_vendor/urllib3/packages/six.py
src/pip/_vendor/urllib3/packages/backports/__init__.py
src/pip/_vendor/urllib3/packages/backports/makefile.py
src/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py
src/pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py
src/pip/_vendor/urllib3/util/__init__.py
src/pip/_vendor/urllib3/util/connection.py
src/pip/_vendor/urllib3/util/queue.py
src/pip/_vendor/urllib3/util/request.py
src/pip/_vendor/urllib3/util/response.py
src/pip/_vendor/urllib3/util/retry.py
src/pip/_vendor/urllib3/util/ssl_.py
src/pip/_vendor/urllib3/util/timeout.py
src/pip/_vendor/urllib3/util/url.py
src/pip/_vendor/urllib3/util/wait.py
src/pip/_vendor/webencodings/LICENSE
src/pip/_vendor/webencodings/__init__.py
src/pip/_vendor/webencodings/labels.py
src/pip/_vendor/webencodings/mklabels.py
src/pip/_vendor/webencodings/tests.py
src/pip/_vendor/webencodings/x_user_defined.py
@@ -1,5 +0,0 @@
[console_scripts]
pip = pip._internal:main
pip3 = pip._internal:main
pip3.7 = pip._internal:main

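For context, a console_scripts entry such as the ones removed above maps a wrapper script to a "module:attribute" spec. A minimal sketch of how such a spec is resolved; the actual import is left commented out so the snippet runs without pip on the path:

    import importlib

    spec = "pip._internal:main"               # entry taken from the file above
    module_name, _, attr = spec.partition(":")
    # entry = getattr(importlib.import_module(module_name), attr)  # -> main()
    print(module_name, attr)                  # -> pip._internal main
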
@@ -1 +0,0 @@
pip
@@ -1 +0,0 @@
__version__ = "19.0.3"
@@ -1,19 +0,0 @@
from __future__ import absolute_import

import os
import sys

# If we are running from a wheel, add the wheel to sys.path
# This allows the usage python pip-*.whl/pip install pip-*.whl
if __package__ == '':
    # __file__ is pip-*.whl/pip/__main__.py
    # first dirname call strips off '/__main__.py', second strips off '/pip'
    # Resulting path is the name of the wheel itself
    # Add that to sys.path so we can import pip
    path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, path)

from pip._internal import main as _main  # isort:skip # noqa

if __name__ == '__main__':
    sys.exit(_main())
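A small, self-contained sketch (not part of the diff) of what the two dirname calls above compute; the wheel filename here is hypothetical:

    import os.path

    # __file__ would look like this when pip runs from inside a wheel:
    module_file = "pip-19.0.3-py3-none-any.whl/pip/__main__.py"
    wheel_path = os.path.dirname(os.path.dirname(module_file))
    print(wheel_path)  # -> pip-19.0.3-py3-none-any.whl
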
@@ -1,78 +0,0 @@
#!/usr/bin/env python
from __future__ import absolute_import

import locale
import logging
import os
import warnings

import sys

# 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks,
# but if invoked (i.e. imported), it will issue a warning to stderr if socks
# isn't available. requests unconditionally imports urllib3's socks contrib
# module, triggering this warning. The warning breaks DEP-8 tests (because of
# the stderr output) and is just plain annoying in normal usage. I don't want
# to add socks as yet another dependency for pip, nor do I want to allow-stderr
# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
# be done before the import of pip.vcs.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.filterwarnings("ignore", category=DependencyWarning)  # noqa

# We want to inject the use of SecureTransport as early as possible so that any
# references or sessions or what have you are ensured to have it, however we
# only want to do this in the case that we're running on macOS and the linked
# OpenSSL is too old to handle TLSv1.2
try:
    import ssl
except ImportError:
    pass
else:
    # Checks for OpenSSL 1.0.1 on MacOS
    if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f:
        try:
            from pip._vendor.urllib3.contrib import securetransport
        except (ImportError, OSError):
            pass
        else:
            securetransport.inject_into_urllib3()

from pip._internal.cli.autocompletion import autocomplete
from pip._internal.cli.main_parser import parse_command
from pip._internal.commands import commands_dict
from pip._internal.exceptions import PipError
from pip._internal.utils import deprecation
from pip._internal.vcs import git, mercurial, subversion, bazaar  # noqa
from pip._vendor.urllib3.exceptions import InsecureRequestWarning

logger = logging.getLogger(__name__)

# Hide the InsecureRequestWarning from urllib3
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


def main(args=None):
    if args is None:
        args = sys.argv[1:]

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    autocomplete()

    try:
        cmd_name, cmd_args = parse_command(args)
    except PipError as exc:
        sys.stderr.write("ERROR: %s" % exc)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error as e:
        # setlocale can apparently crash if locales are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    command = commands_dict[cmd_name](isolated=("--isolated" in cmd_args))
    return command.main(cmd_args)
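An aside on the 0x1000100f threshold above: ssl.OPENSSL_VERSION_NUMBER packs the version as 0xMNNFFPPS (major, minor, fix, patch, status nibbles), so the constant denotes OpenSSL 1.0.1. A runnable sketch of that reading:

    version = 0x1000100F          # the threshold used in the removed module
    major = version >> 28
    minor = (version >> 20) & 0xFF
    fix = (version >> 12) & 0xFF
    status = version & 0xF        # 0xF marks a release build
    print(major, minor, fix, hex(status))  # -> 1 0 1 0xf
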
@@ -1,215 +0,0 @@
"""Build Environment used for isolation during sdist building
"""

import logging
import os
import sys
import textwrap
from collections import OrderedDict
from distutils.sysconfig import get_python_lib
from sysconfig import get_paths

from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet

from pip import __file__ as pip_location
from pip._internal.utils.misc import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import open_spinner

if MYPY_CHECK_RUNNING:
    from typing import Tuple, Set, Iterable, Optional, List  # noqa: F401
    from pip._internal.index import PackageFinder  # noqa: F401

logger = logging.getLogger(__name__)


class _Prefix:

    def __init__(self, path):
        # type: (str) -> None
        self.path = path
        self.setup = False
        self.bin_dir = get_paths(
            'nt' if os.name == 'nt' else 'posix_prefix',
            vars={'base': path, 'platbase': path}
        )['scripts']
        # Note: prefer distutils' sysconfig to get the
        # library paths so PyPy is correctly supported.
        purelib = get_python_lib(plat_specific=False, prefix=path)
        platlib = get_python_lib(plat_specific=True, prefix=path)
        if purelib == platlib:
            self.lib_dirs = [purelib]
        else:
            self.lib_dirs = [purelib, platlib]


class BuildEnvironment(object):
    """Creates and manages an isolated environment to install build deps
    """

    def __init__(self):
        # type: () -> None
        self._temp_dir = TempDirectory(kind="build-env")
        self._temp_dir.create()

        self._prefixes = OrderedDict((
            (name, _Prefix(os.path.join(self._temp_dir.path, name)))
            for name in ('normal', 'overlay')
        ))

        self._bin_dirs = []  # type: List[str]
        self._lib_dirs = []  # type: List[str]
        for prefix in reversed(list(self._prefixes.values())):
            self._bin_dirs.append(prefix.bin_dir)
            self._lib_dirs.extend(prefix.lib_dirs)

        # Customize site to:
        # - ensure .pth files are honored
        # - prevent access to system site packages
        system_sites = {
            os.path.normcase(site) for site in (
                get_python_lib(plat_specific=False),
                get_python_lib(plat_specific=True),
            )
        }
        self._site_dir = os.path.join(self._temp_dir.path, 'site')
        if not os.path.exists(self._site_dir):
            os.mkdir(self._site_dir)
        with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
            fp.write(textwrap.dedent(
                '''
                import os, site, sys

                # First, drop system-sites related paths.
                original_sys_path = sys.path[:]
                known_paths = set()
                for path in {system_sites!r}:
                    site.addsitedir(path, known_paths=known_paths)
                system_paths = set(
                    os.path.normcase(path)
                    for path in sys.path[len(original_sys_path):]
                )
                original_sys_path = [
                    path for path in original_sys_path
                    if os.path.normcase(path) not in system_paths
                ]
                sys.path = original_sys_path

                # Second, add lib directories,
                # ensuring .pth files are processed.
                for path in {lib_dirs!r}:
                    assert not path in sys.path
                    site.addsitedir(path)
                '''
            ).format(system_sites=system_sites, lib_dirs=self._lib_dirs))

    def __enter__(self):
        self._save_env = {
            name: os.environ.get(name, None)
            for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
        }

        path = self._bin_dirs[:]
        old_path = self._save_env['PATH']
        if old_path:
            path.extend(old_path.split(os.pathsep))

        pythonpath = [self._site_dir]

        os.environ.update({
            'PATH': os.pathsep.join(path),
            'PYTHONNOUSERSITE': '1',
            'PYTHONPATH': os.pathsep.join(pythonpath),
        })

    def __exit__(self, exc_type, exc_val, exc_tb):
        for varname, old_value in self._save_env.items():
            if old_value is None:
                os.environ.pop(varname, None)
            else:
                os.environ[varname] = old_value

    def cleanup(self):
        # type: () -> None
        self._temp_dir.cleanup()

    def check_requirements(self, reqs):
        # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
        """Return 2 sets:
            - conflicting requirements: set of (installed, wanted) reqs tuples
            - missing requirements: set of reqs
        """
        missing = set()
        conflicting = set()
        if reqs:
            ws = WorkingSet(self._lib_dirs)
            for req in reqs:
                try:
                    if ws.find(Requirement.parse(req)) is None:
                        missing.add(req)
                except VersionConflict as e:
                    conflicting.add((str(e.args[0].as_requirement()),
                                     str(e.args[1])))
        return conflicting, missing

    def install_requirements(
        self,
        finder,  # type: PackageFinder
        requirements,  # type: Iterable[str]
        prefix_as_string,  # type: str
        message  # type: Optional[str]
    ):
        # type: (...) -> None
        prefix = self._prefixes[prefix_as_string]
        assert not prefix.setup
        prefix.setup = True
        if not requirements:
            return
        args = [
            sys.executable, os.path.dirname(pip_location), 'install',
            '--ignore-installed', '--no-user', '--prefix', prefix.path,
            '--no-warn-script-location',
        ]  # type: List[str]
        if logger.getEffectiveLevel() <= logging.DEBUG:
            args.append('-v')
        for format_control in ('no_binary', 'only_binary'):
            formats = getattr(finder.format_control, format_control)
            args.extend(('--' + format_control.replace('_', '-'),
                         ','.join(sorted(formats or {':none:'}))))
        if finder.index_urls:
            args.extend(['-i', finder.index_urls[0]])
            for extra_index in finder.index_urls[1:]:
                args.extend(['--extra-index-url', extra_index])
        else:
            args.append('--no-index')
        for link in finder.find_links:
            args.extend(['--find-links', link])
        for _, host, _ in finder.secure_origins:
            args.extend(['--trusted-host', host])
        if finder.allow_all_prereleases:
            args.append('--pre')
        args.append('--')
        args.extend(requirements)
        with open_spinner(message) as spinner:
            call_subprocess(args, show_stdout=False, spinner=spinner)


class NoOpBuildEnvironment(BuildEnvironment):
    """A no-op drop-in replacement for BuildEnvironment
    """

    def __init__(self):
        pass

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_val, exc_tb):
        pass

    def cleanup(self):
        pass

    def install_requirements(self, finder, requirements, prefix, message):
        raise NotImplementedError()
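The __enter__/__exit__ pair above is a save-and-restore pattern over a handful of environment variables. A standalone, runnable sketch of that pattern (the site directory value is hypothetical):

    import os

    saved = {name: os.environ.get(name) for name in ("PATH", "PYTHONPATH")}
    os.environ["PYTHONPATH"] = "/tmp/build-env/site"   # hypothetical site dir
    try:
        pass  # isolated work would happen here
    finally:
        for name, old in saved.items():
            if old is None:
                os.environ.pop(name, None)   # was unset before; unset again
            else:
                os.environ[name] = old       # restore the previous value
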
@@ -1,224 +0,0 @@
"""Cache Management
"""

import errno
import hashlib
import logging
import os

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.download import path_to_url
from pip._internal.models.link import Link
from pip._internal.utils.compat import expanduser
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.wheel import InvalidWheelFilename, Wheel

if MYPY_CHECK_RUNNING:
    from typing import Optional, Set, List, Any  # noqa: F401
    from pip._internal.index import FormatControl  # noqa: F401

logger = logging.getLogger(__name__)


class Cache(object):
    """An abstract class - provides cache directories for data from links


    :param cache_dir: The root of the cache.
    :param format_control: An object of FormatControl class to limit
        binaries being read from the cache.
    :param allowed_formats: which formats of files the cache should store.
        ('binary' and 'source' are the only allowed values)
    """

    def __init__(self, cache_dir, format_control, allowed_formats):
        # type: (str, FormatControl, Set[str]) -> None
        super(Cache, self).__init__()
        self.cache_dir = expanduser(cache_dir) if cache_dir else None
        self.format_control = format_control
        self.allowed_formats = allowed_formats

        _valid_formats = {"source", "binary"}
        assert self.allowed_formats.union(_valid_formats) == _valid_formats

    def _get_cache_path_parts(self, link):
        # type: (Link) -> List[str]
        """Get parts of the path that must be os.path.joined with cache_dir
        """

        # We want to generate an url to use as our cache key, we don't want to
        # just re-use the URL because it might have other items in the fragment
        # and we don't care about those.
        key_parts = [link.url_without_fragment]
        if link.hash_name is not None and link.hash is not None:
            key_parts.append("=".join([link.hash_name, link.hash]))
        key_url = "#".join(key_parts)

        # Encode our key url with sha224, we'll use this because it has similar
        # security properties to sha256, but with a shorter total output (and
        # thus less secure). However the differences don't make a lot of
        # difference for our use case here.
        hashed = hashlib.sha224(key_url.encode()).hexdigest()

        # We want to nest the directories some to prevent having a ton of top
        # level directories where we might run out of sub directories on some
        # FS.
        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

        return parts

    def _get_candidates(self, link, package_name):
        # type: (Link, Optional[str]) -> List[Any]
        can_not_cache = (
            not self.cache_dir or
            not package_name or
            not link
        )
        if can_not_cache:
            return []

        canonical_name = canonicalize_name(package_name)
        formats = self.format_control.get_allowed_formats(
            canonical_name
        )
        if not self.allowed_formats.intersection(formats):
            return []

        root = self.get_path_for_link(link)
        try:
            return os.listdir(root)
        except OSError as err:
            if err.errno in {errno.ENOENT, errno.ENOTDIR}:
                return []
            raise

    def get_path_for_link(self, link):
        # type: (Link) -> str
        """Return a directory to store cached items in for link.
        """
        raise NotImplementedError()

    def get(self, link, package_name):
        # type: (Link, Optional[str]) -> Link
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.
        """
        raise NotImplementedError()

    def _link_for_candidate(self, link, candidate):
        # type: (Link, str) -> Link
        root = self.get_path_for_link(link)
        path = os.path.join(root, candidate)

        return Link(path_to_url(path))

    def cleanup(self):
        # type: () -> None
        pass


class SimpleWheelCache(Cache):
    """A cache of wheels for future installs.
    """

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super(SimpleWheelCache, self).__init__(
            cache_dir, format_control, {"binary"}
        )

    def get_path_for_link(self, link):
        # type: (Link) -> str
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
        to cache them in, and then consult that directory when looking up
        cache hits.

        We only insert things into the cache if they have plausible version
        numbers, so that we don't contaminate the cache with things that were
        not unique. E.g. ./package might have dozens of installs done for it
        and build a version of 0.0...and if we built and cached a wheel, we'd
        end up using the same wheel even if the source has been edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        parts = self._get_cache_path_parts(link)

        # Store wheels within the root cache_dir
        return os.path.join(self.cache_dir, "wheels", *parts)

    def get(self, link, package_name):
        # type: (Link, Optional[str]) -> Link
        candidates = []

        for wheel_name in self._get_candidates(link, package_name):
            try:
                wheel = Wheel(wheel_name)
            except InvalidWheelFilename:
                continue
            if not wheel.supported():
                # Built for a different python/arch/etc
                continue
            candidates.append((wheel.support_index_min(), wheel_name))

        if not candidates:
            return link

        return self._link_for_candidate(link, min(candidates)[1])


class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache that creates its own temporary cache directory
    """

    def __init__(self, format_control):
        # type: (FormatControl) -> None
        self._temp_dir = TempDirectory(kind="ephem-wheel-cache")
        self._temp_dir.create()

        super(EphemWheelCache, self).__init__(
            self._temp_dir.path, format_control
        )

    def cleanup(self):
        # type: () -> None
        self._temp_dir.cleanup()


class WheelCache(Cache):
    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache

    This Cache allows for graceful degradation, using the ephem wheel cache
    when a certain link is not found in the simple wheel cache first.
    """

    def __init__(self, cache_dir, format_control):
        # type: (str, FormatControl) -> None
        super(WheelCache, self).__init__(
            cache_dir, format_control, {'binary'}
        )
        self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
        self._ephem_cache = EphemWheelCache(format_control)

    def get_path_for_link(self, link):
        # type: (Link) -> str
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link):
        # type: (Link) -> str
        return self._ephem_cache.get_path_for_link(link)

    def get(self, link, package_name):
        # type: (Link, Optional[str]) -> Link
        retval = self._wheel_cache.get(link, package_name)
        if retval is link:
            retval = self._ephem_cache.get(link, package_name)
        return retval

    def cleanup(self):
        # type: () -> None
        self._wheel_cache.cleanup()
        self._ephem_cache.cleanup()
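A runnable sketch of the path derivation in _get_cache_path_parts above: hash the fragment-free URL with sha224, then split the digest into nested directory names so no single directory grows too large (the URL and cache root here are hypothetical):

    import hashlib
    import os.path

    key_url = "https://files.example.org/package-1.0.tar.gz"
    hashed = hashlib.sha224(key_url.encode()).hexdigest()
    parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
    print(os.path.join("~/.cache/pip", "wheels", *parts))
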
@@ -1,4 +0,0 @@
"""Subpackage containing all of pip's command line interface related code
"""

# This file intentionally does not import submodules
@@ -1,152 +0,0 @@
"""Logic that powers autocompletion installed by ``pip completion``.
"""

import optparse
import os
import sys

from pip._internal.cli.main_parser import create_main_parser
from pip._internal.commands import commands_dict, get_summaries
from pip._internal.utils.misc import get_installed_distributions


def autocomplete():
    """Entry Point for completion of main and subcommand options.
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ''

    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = (
            subcommand_name in ['show', 'uninstall'] and
            not current.startswith('-')
        )
        if should_list_installed:
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands_dict[subcommand_name]()

        for opt in subcommand.parser.option_list_all:
            if opt.help != optparse.SUPPRESS_HELP:
                for opt_str in opt._long_opts + opt._short_opts:
                    options.append((opt_str, opt.nargs))

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        # get completion type given cwords and available subcommand options
        completion_type = get_path_completion_type(
            cwords, cword, subcommand.parser.option_list_all,
        )
        # get completion files and directories if ``completion_type`` is
        # ``<file>``, ``<dir>`` or ``<path>``
        if completion_type:
            options = auto_complete_paths(current, completion_type)
            options = ((opt, 0) for opt in options)
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary

        opts = [i.option_list for i in parser.option_groups]
        opts.append(parser.option_list)
        opts = (o for it in opts for o in it)
        if current.startswith('-'):
            for opt in opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts
        else:
            # get completion type given cwords and all available options
            completion_type = get_path_completion_type(cwords, cword, opts)
            if completion_type:
                subcommands = auto_complete_paths(current, completion_type)

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)


def get_path_completion_type(cwords, cword, opts):
    """Get the type of path completion (``file``, ``dir``, ``path`` or None)

    :param cwords: same as the environmental variable ``COMP_WORDS``
    :param cword: same as the environmental variable ``COMP_CWORD``
    :param opts: The available options to check
    :return: path completion type (``file``, ``dir``, ``path`` or None)
    """
    if cword < 2 or not cwords[cword - 2].startswith('-'):
        return
    for opt in opts:
        if opt.help == optparse.SUPPRESS_HELP:
            continue
        for o in str(opt).split('/'):
            if cwords[cword - 2].split('=')[0] == o:
                if not opt.metavar or any(
                        x in ('path', 'file', 'dir')
                        for x in opt.metavar.split('/')):
                    return opt.metavar


def auto_complete_paths(current, completion_type):
    """If ``completion_type`` is ``file`` or ``path``, list all regular files
    and directories starting with ``current``; otherwise only list directories
    starting with ``current``.

    :param current: The word to be completed
    :param completion_type: path completion type (``file``, ``path`` or ``dir``)
    :return: A generator of regular files and/or directories
    """
    directory, filename = os.path.split(current)
    current_path = os.path.abspath(directory)
    # Don't complete paths if they can't be accessed
    if not os.access(current_path, os.R_OK):
        return
    filename = os.path.normcase(filename)
    # list all files that start with ``filename``
    file_list = (x for x in os.listdir(current_path)
                 if os.path.normcase(x).startswith(filename))
    for f in file_list:
        opt = os.path.join(current_path, f)
        comp_file = os.path.normcase(os.path.join(directory, f))
        # complete regular files when there is not ``<dir>`` after option
        # complete directories when there is ``<file>``, ``<path>`` or
        # ``<dir>`` after option
        if completion_type != 'dir' and os.path.isfile(opt):
            yield comp_file
        elif os.path.isdir(opt):
            yield os.path.join(comp_file, '')
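The completion entry point above is driven entirely by environment variables that the shell hook exports. A runnable sketch of that handshake (the values are hypothetical):

    import os

    os.environ["PIP_AUTO_COMPLETE"] = "1"
    os.environ["COMP_WORDS"] = "pip inst"   # the partial command line
    os.environ["COMP_CWORD"] = "1"          # index of the word being completed
    cwords = os.environ["COMP_WORDS"].split()[1:]
    current = cwords[int(os.environ["COMP_CWORD"]) - 1]
    print(current)  # -> inst; autocomplete() would then print "install"
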
@ -1,341 +0,0 @@
|
|||||||
"""Base Command class, and related routines"""
|
|
||||||
from __future__ import absolute_import, print_function
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import logging.config
|
|
||||||
import optparse
|
|
||||||
import os
|
|
||||||
import platform
|
|
||||||
import sys
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
from pip._internal.cli import cmdoptions
|
|
||||||
from pip._internal.cli.parser import (
|
|
||||||
ConfigOptionParser, UpdatingDefaultsHelpFormatter,
|
|
||||||
)
|
|
||||||
from pip._internal.cli.status_codes import (
|
|
||||||
ERROR, PREVIOUS_BUILD_DIR_ERROR, SUCCESS, UNKNOWN_ERROR,
|
|
||||||
VIRTUALENV_NOT_FOUND,
|
|
||||||
)
|
|
||||||
from pip._internal.download import PipSession
|
|
||||||
from pip._internal.exceptions import (
|
|
||||||
BadCommand, CommandError, InstallationError, PreviousBuildDirError,
|
|
||||||
UninstallationError,
|
|
||||||
)
|
|
||||||
from pip._internal.index import PackageFinder
|
|
||||||
from pip._internal.locations import running_under_virtualenv
|
|
||||||
from pip._internal.req.constructors import (
|
|
||||||
install_req_from_editable, install_req_from_line,
|
|
||||||
)
|
|
||||||
from pip._internal.req.req_file import parse_requirements
|
|
||||||
from pip._internal.utils.deprecation import deprecated
|
|
||||||
from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
|
|
||||||
from pip._internal.utils.misc import (
|
|
||||||
get_prog, normalize_path, redact_password_from_url,
|
|
||||||
)
|
|
||||||
from pip._internal.utils.outdated import pip_version_check
|
|
||||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
|
||||||
|
|
||||||
if MYPY_CHECK_RUNNING:
|
|
||||||
from typing import Optional, List, Tuple, Any # noqa: F401
|
|
||||||
from optparse import Values # noqa: F401
|
|
||||||
from pip._internal.cache import WheelCache # noqa: F401
|
|
||||||
from pip._internal.req.req_set import RequirementSet # noqa: F401
|
|
||||||
|
|
||||||
__all__ = ['Command']
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Command(object):
|
|
||||||
name = None # type: Optional[str]
|
|
||||||
usage = None # type: Optional[str]
|
|
||||||
hidden = False # type: bool
|
|
||||||
ignore_require_venv = False # type: bool
|
|
||||||
|
|
||||||
def __init__(self, isolated=False):
|
|
||||||
# type: (bool) -> None
|
|
||||||
parser_kw = {
|
|
||||||
'usage': self.usage,
|
|
||||||
'prog': '%s %s' % (get_prog(), self.name),
|
|
||||||
'formatter': UpdatingDefaultsHelpFormatter(),
|
|
||||||
'add_help_option': False,
|
|
||||||
'name': self.name,
|
|
||||||
'description': self.__doc__,
|
|
||||||
'isolated': isolated,
|
|
||||||
}
|
|
||||||
|
|
||||||
self.parser = ConfigOptionParser(**parser_kw)
|
|
||||||
|
|
||||||
# Commands should add options to this option group
|
|
||||||
optgroup_name = '%s Options' % self.name.capitalize()
|
|
||||||
self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
|
|
||||||
|
|
||||||
# Add the general options
|
|
||||||
gen_opts = cmdoptions.make_option_group(
|
|
||||||
cmdoptions.general_group,
|
|
||||||
self.parser,
|
|
||||||
)
|
|
||||||
self.parser.add_option_group(gen_opts)
|
|
||||||
|
|
||||||
def run(self, options, args):
|
|
||||||
# type: (Values, List[Any]) -> Any
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
def _build_session(self, options, retries=None, timeout=None):
|
|
||||||
# type: (Values, Optional[int], Optional[int]) -> PipSession
|
|
||||||
session = PipSession(
|
|
||||||
cache=(
|
|
||||||
normalize_path(os.path.join(options.cache_dir, "http"))
|
|
||||||
if options.cache_dir else None
|
|
||||||
),
|
|
||||||
retries=retries if retries is not None else options.retries,
|
|
||||||
insecure_hosts=options.trusted_hosts,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Handle custom ca-bundles from the user
|
|
||||||
if options.cert:
|
|
||||||
session.verify = options.cert
|
|
||||||
|
|
||||||
# Handle SSL client certificate
|
|
||||||
if options.client_cert:
|
|
||||||
session.cert = options.client_cert
|
|
||||||
|
|
||||||
# Handle timeouts
|
|
||||||
if options.timeout or timeout:
|
|
||||||
session.timeout = (
|
|
||||||
timeout if timeout is not None else options.timeout
|
|
||||||
)
|
|
||||||
|
|
||||||
# Handle configured proxies
|
|
||||||
if options.proxy:
|
|
||||||
session.proxies = {
|
|
||||||
"http": options.proxy,
|
|
||||||
"https": options.proxy,
|
|
||||||
}
|
|
||||||
|
|
||||||
# Determine if we can prompt the user for authentication or not
|
|
||||||
session.auth.prompting = not options.no_input
|
|
||||||
|
|
||||||
return session
|
|
||||||
|
|
||||||
def parse_args(self, args):
|
|
||||||
# type: (List[str]) -> Tuple
|
|
||||||
# factored out for testability
|
|
||||||
return self.parser.parse_args(args)
|
|
||||||
|
|
||||||
def main(self, args):
|
|
||||||
# type: (List[str]) -> int
|
|
||||||
options, args = self.parse_args(args)
|
|
||||||
|
|
||||||
# Set verbosity so that it can be used elsewhere.
|
|
||||||
self.verbosity = options.verbose - options.quiet
|
|
||||||
|
|
||||||
level_number = setup_logging(
|
|
||||||
verbosity=self.verbosity,
|
|
||||||
no_color=options.no_color,
|
|
||||||
user_log_file=options.log,
|
|
||||||
)
|
|
||||||
|
|
||||||
if sys.version_info[:2] == (3, 4):
|
|
||||||
deprecated(
|
|
||||||
"Python 3.4 support has been deprecated. pip 19.1 will be the "
|
|
||||||
"last one supporting it. Please upgrade your Python as Python "
|
|
||||||
"3.4 won't be maintained after March 2019 (cf PEP 429).",
|
|
||||||
replacement=None,
|
|
||||||
gone_in='19.2',
|
|
||||||
)
|
|
||||||
elif sys.version_info[:2] == (2, 7):
|
|
||||||
message = (
|
|
||||||
"A future version of pip will drop support for Python 2.7."
|
|
||||||
)
|
|
||||||
if platform.python_implementation() == "CPython":
|
|
||||||
message = (
|
|
||||||
"Python 2.7 will reach the end of its life on January "
|
|
||||||
"1st, 2020. Please upgrade your Python as Python 2.7 "
|
|
||||||
"won't be maintained after that date. "
|
|
||||||
) + message
|
|
||||||
deprecated(message, replacement=None, gone_in=None)
|
|
||||||
|
|
||||||
# TODO: Try to get these passing down from the command?
|
|
||||||
# without resorting to os.environ to hold these.
|
|
||||||
# This also affects isolated builds and it should.
|
|
||||||
|
|
||||||
if options.no_input:
|
|
||||||
os.environ['PIP_NO_INPUT'] = '1'
|
|
||||||
|
|
||||||
if options.exists_action:
|
|
||||||
os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
|
|
||||||
|
|
||||||
if options.require_venv and not self.ignore_require_venv:
|
|
||||||
# If a venv is required check if it can really be found
|
|
||||||
if not running_under_virtualenv():
|
|
||||||
logger.critical(
|
|
||||||
'Could not find an activated virtualenv (required).'
|
|
||||||
)
|
|
||||||
sys.exit(VIRTUALENV_NOT_FOUND)
|
|
||||||
|
|
||||||
try:
|
|
||||||
status = self.run(options, args)
|
|
||||||
# FIXME: all commands should return an exit status
|
|
||||||
# and when it is done, isinstance is not needed anymore
|
|
||||||
if isinstance(status, int):
|
|
||||||
return status
|
|
||||||
except PreviousBuildDirError as exc:
|
|
||||||
logger.critical(str(exc))
|
|
||||||
logger.debug('Exception information:', exc_info=True)
|
|
||||||
|
|
||||||
return PREVIOUS_BUILD_DIR_ERROR
|
|
||||||
except (InstallationError, UninstallationError, BadCommand) as exc:
|
|
||||||
logger.critical(str(exc))
|
|
||||||
logger.debug('Exception information:', exc_info=True)
|
|
||||||
|
|
||||||
return ERROR
|
|
||||||
except CommandError as exc:
|
|
||||||
logger.critical('ERROR: %s', exc)
|
|
||||||
logger.debug('Exception information:', exc_info=True)
|
|
||||||
|
|
||||||
return ERROR
|
|
||||||
except BrokenStdoutLoggingError:
|
|
||||||
# Bypass our logger and write any remaining messages to stderr
|
|
||||||
# because stdout no longer works.
|
|
||||||
print('ERROR: Pipe to stdout was broken', file=sys.stderr)
|
|
||||||
if level_number <= logging.DEBUG:
|
|
||||||
traceback.print_exc(file=sys.stderr)
|
|
||||||
|
|
||||||
return ERROR
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
logger.critical('Operation cancelled by user')
|
|
||||||
logger.debug('Exception information:', exc_info=True)
|
|
||||||
|
|
||||||
return ERROR
|
|
||||||
except BaseException:
|
|
||||||
logger.critical('Exception:', exc_info=True)
|
|
||||||
|
|
||||||
return UNKNOWN_ERROR
|
|
||||||
finally:
|
|
||||||
allow_version_check = (
|
|
||||||
# Does this command have the index_group options?
|
|
||||||
hasattr(options, "no_index") and
|
|
||||||
# Is this command allowed to perform this check?
|
|
||||||
not (options.disable_pip_version_check or options.no_index)
|
|
||||||
)
|
|
||||||
# Check if we're using the latest version of pip available
|
|
||||||
if allow_version_check:
|
|
||||||
session = self._build_session(
|
|
||||||
options,
|
|
||||||
retries=0,
|
|
||||||
timeout=min(5, options.timeout)
|
|
||||||
)
|
|
||||||
with session:
|
|
||||||
pip_version_check(session, options)
|
|
||||||
|
|
||||||
# Shutdown the logging module
|
|
||||||
logging.shutdown()
|
|
||||||
|
|
||||||
return SUCCESS
|
|
||||||
|
|
||||||
|
|
||||||
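
Every branch above funnels into one of the integer statuses defined in pip's status_codes module (SUCCESS = 0, ERROR = 1, UNKNOWN_ERROR = 2, and so on; the module appears later in this diff). A minimal, self-contained sketch of the same exception-to-exit-status pattern — the names here are illustrative, not pip's API:

    import logging
    import sys

    # Status values mirror pip's status_codes module (shown later in this diff).
    SUCCESS, ERROR, UNKNOWN_ERROR = 0, 1, 2

    logger = logging.getLogger(__name__)


    def dispatch(run):
        """Run a command callable and translate outcomes into exit statuses."""
        try:
            status = run()
            if isinstance(status, int):
                return status
        except KeyboardInterrupt:
            logger.critical('Operation cancelled by user')
            return ERROR
        except BaseException:
            logger.critical('Exception:', exc_info=True)
            return UNKNOWN_ERROR
        return SUCCESS


    if __name__ == '__main__':
        sys.exit(dispatch(lambda: SUCCESS))
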
class RequirementCommand(Command):

    @staticmethod
    def populate_requirement_set(requirement_set,  # type: RequirementSet
                                 args,             # type: List[str]
                                 options,          # type: Values
                                 finder,           # type: PackageFinder
                                 session,          # type: PipSession
                                 name,             # type: str
                                 wheel_cache       # type: Optional[WheelCache]
                                 ):
        # type: (...) -> None
        """
        Marshal cmd line args into a requirement set.
        """
        # NOTE: As a side-effect, options.require_hashes and
        #       requirement_set.require_hashes may be updated

        for filename in options.constraints:
            for req_to_add in parse_requirements(
                    filename,
                    constraint=True, finder=finder, options=options,
                    session=session, wheel_cache=wheel_cache):
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)

        for req in args:
            req_to_add = install_req_from_line(
                req, None, isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                wheel_cache=wheel_cache
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        for req in options.editables:
            req_to_add = install_req_from_editable(
                req,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                wheel_cache=wheel_cache
            )
            req_to_add.is_direct = True
            requirement_set.add_requirement(req_to_add)

        for filename in options.requirements:
            for req_to_add in parse_requirements(
                    filename,
                    finder=finder, options=options, session=session,
                    wheel_cache=wheel_cache,
                    use_pep517=options.use_pep517):
                req_to_add.is_direct = True
                requirement_set.add_requirement(req_to_add)
        # If --require-hashes was a line in a requirements file, tell
        # RequirementSet about it:
        requirement_set.require_hashes = options.require_hashes

        if not (args or options.editables or options.requirements):
            opts = {'name': name}
            if options.find_links:
                raise CommandError(
                    'You must give at least one requirement to %(name)s '
                    '(maybe you meant "pip %(name)s %(links)s"?)' %
                    dict(opts, links=' '.join(options.find_links)))
            else:
                raise CommandError(
                    'You must give at least one requirement to %(name)s '
                    '(see "pip help %(name)s")' % opts)
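
The method gathers requirements from four sources in a fixed order — constraint files, positional arguments, editables, then requirements files — and marks each as `is_direct`. A sketch of just that ordering, using plain tuples instead of pip's RequirementSet machinery (illustrative only):

    def collect(constraints, args, editables, requirements):
        """Mirror populate_requirement_set's collection order with plain data."""
        collected = []
        for filename in constraints:      # 1. -c/--constraint files
            collected.append(('constraint', filename))
        for req in args:                  # 2. positional requirement specifiers
            collected.append(('line', req))
        for req in editables:             # 3. -e/--editable targets
            collected.append(('editable', req))
        for filename in requirements:     # 4. -r/--requirement files
            collected.append(('file', filename))
        return collected


    print(collect(['constraints.txt'], ['requests'], [], ['requirements.txt']))

Note the side effect documented above: a --require-hashes line inside a requirements file flips options.require_hashes, which is then copied onto the requirement set.
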
    def _build_package_finder(
        self,
        options,               # type: Values
        session,               # type: PipSession
        platform=None,         # type: Optional[str]
        python_versions=None,  # type: Optional[List[str]]
        abi=None,              # type: Optional[str]
        implementation=None    # type: Optional[str]
    ):
        # type: (...) -> PackageFinder
        """
        Create a package finder appropriate to this requirement command.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug(
                'Ignoring indexes: %s',
                ','.join(redact_password_from_url(url) for url in index_urls),
            )
            index_urls = []

        return PackageFinder(
            find_links=options.find_links,
            format_control=options.format_control,
            index_urls=index_urls,
            trusted_hosts=options.trusted_hosts,
            allow_all_prereleases=options.pre,
            session=session,
            platform=platform,
            versions=python_versions,
            abi=abi,
            implementation=implementation,
            prefer_binary=options.prefer_binary,
        )
@@ -1,809 +0,0 @@
"""
|
|
||||||
shared options and groups
|
|
||||||
|
|
||||||
The principle here is to define options once, but *not* instantiate them
|
|
||||||
globally. One reason being that options with action='append' can carry state
|
|
||||||
between parses. pip parses general options twice internally, and shouldn't
|
|
||||||
pass on state. To be consistent, all options will follow this design.
|
|
||||||
|
|
||||||
"""
|
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
import textwrap
|
|
||||||
import warnings
|
|
||||||
from distutils.util import strtobool
|
|
||||||
from functools import partial
|
|
||||||
from optparse import SUPPRESS_HELP, Option, OptionGroup
|
|
||||||
|
|
||||||
from pip._internal.exceptions import CommandError
|
|
||||||
from pip._internal.locations import USER_CACHE_DIR, src_prefix
|
|
||||||
from pip._internal.models.format_control import FormatControl
|
|
||||||
from pip._internal.models.index import PyPI
|
|
||||||
from pip._internal.utils.hashes import STRONG_HASHES
|
|
||||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
|
||||||
from pip._internal.utils.ui import BAR_TYPES
|
|
||||||
|
|
||||||
if MYPY_CHECK_RUNNING:
|
|
||||||
from typing import Any, Callable, Dict, List, Optional, Union # noqa: F401
|
|
||||||
from optparse import OptionParser, Values # noqa: F401
|
|
||||||
from pip._internal.cli.parser import ConfigOptionParser # noqa: F401
|
|
||||||
|
|
||||||
|
|
||||||
def raise_option_error(parser, option, msg):
|
|
||||||
"""
|
|
||||||
Raise an option parsing error using parser.error().
|
|
||||||
|
|
||||||
Args:
|
|
||||||
parser: an OptionParser instance.
|
|
||||||
option: an Option instance.
|
|
||||||
msg: the error text.
|
|
||||||
"""
|
|
||||||
msg = '{} error: {}'.format(option, msg)
|
|
||||||
msg = textwrap.fill(' '.join(msg.split()))
|
|
||||||
parser.error(msg)
|
|
||||||
|
|
||||||
|
|
||||||
def make_option_group(group, parser):
|
|
||||||
# type: (Dict[str, Any], ConfigOptionParser) -> OptionGroup
|
|
||||||
"""
|
|
||||||
Return an OptionGroup object
|
|
||||||
group -- assumed to be dict with 'name' and 'options' keys
|
|
||||||
parser -- an optparse Parser
|
|
||||||
"""
|
|
||||||
option_group = OptionGroup(parser, group['name'])
|
|
||||||
for option in group['options']:
|
|
||||||
option_group.add_option(option())
|
|
||||||
return option_group
|
|
||||||
|
|
||||||
|
|
||||||
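
The docstring's rationale is easy to demonstrate with plain optparse: an Option with action='append' owns a mutable default list, so sharing one instance across parsers leaks state between parses — exactly what the factory pattern (partial(Option, ...) and the zero-argument functions below) avoids by building a fresh Option per parser. A self-contained demonstration of the hazard:

    from optparse import Option, OptionParser

    # One shared Option instance -- the anti-pattern this module avoids.
    shared = Option('--point', dest='points', action='append', default=[])


    def parse(argv):
        parser = OptionParser()
        parser.add_option(shared)  # same Option (and same default list) reused
        options, _ = parser.parse_args(argv)
        return options.points


    print(parse(['--point', 'a']))  # ['a']
    print(parse(['--point', 'b']))  # ['a', 'b'] -- state carried over!
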
def check_install_build_global(options, check_options=None):
    # type: (Values, Optional[Values]) -> None
    """Disable wheels if per-setup.py call options are set.

    :param options: The OptionParser options to update.
    :param check_options: The options to check, if not supplied defaults to
        options.
    """
    if check_options is None:
        check_options = options

    def getname(n):
        return getattr(check_options, n, None)
    names = ["build_options", "global_options", "install_options"]
    if any(map(getname, names)):
        control = options.format_control
        control.disallow_binaries()
        warnings.warn(
            'Disabling all use of wheels due to the use of --build-options '
            '/ --global-options / --install-options.', stacklevel=2,
        )


def check_dist_restriction(options, check_target=False):
    # type: (Values, bool) -> None
    """Function for determining if custom platform options are allowed.

    :param options: The OptionParser options.
    :param check_target: Whether or not to check if --target is being used.
    """
    dist_restriction_set = any([
        options.python_version,
        options.platform,
        options.abi,
        options.implementation,
    ])

    binary_only = FormatControl(set(), {':all:'})
    sdist_dependencies_allowed = (
        options.format_control != binary_only and
        not options.ignore_dependencies
    )

    # Installations or downloads using dist restrictions must not combine
    # source distributions and dist-specific wheels, as they are not
    # guaranteed to be locally compatible.
    if dist_restriction_set and sdist_dependencies_allowed:
        raise CommandError(
            "When restricting platform and interpreter constraints using "
            "--python-version, --platform, --abi, or --implementation, "
            "either --no-deps must be set, or --only-binary=:all: must be "
            "set and --no-binary must not be set (or must be set to "
            ":none:)."
        )

    if check_target:
        if dist_restriction_set and not options.target_dir:
            raise CommandError(
                "Can not use any platform or abi specific options unless "
                "installing via '--target'"
            )
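
In practice this check is what forces cross-environment fetches to be wheel-only. A pip download invocation that satisfies it might look like the following (the package name is a placeholder; the tags follow pip's documentation of this era):

    pip download SomePackage \
        --platform manylinux1_x86_64 \
        --python-version 36 \
        --implementation cp \
        --abi cp36m \
        --only-binary=:all:
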
###########
# options #
###########

help_ = partial(
    Option,
    '-h', '--help',
    dest='help',
    action='help',
    help='Show help.',
)  # type: Callable[..., Option]

isolated_mode = partial(
    Option,
    "--isolated",
    dest="isolated_mode",
    action="store_true",
    default=False,
    help=(
        "Run pip in an isolated mode, ignoring environment variables and user "
        "configuration."
    ),
)  # type: Callable[..., Option]

require_virtualenv = partial(
    Option,
    # Run only if inside a virtualenv, bail if not.
    '--require-virtualenv', '--require-venv',
    dest='require_venv',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP
)  # type: Callable[..., Option]

verbose = partial(
    Option,
    '-v', '--verbose',
    dest='verbose',
    action='count',
    default=0,
    help='Give more output. Option is additive, and can be used up to 3 times.'
)  # type: Callable[..., Option]

no_color = partial(
    Option,
    '--no-color',
    dest='no_color',
    action='store_true',
    default=False,
    help="Suppress colored output",
)  # type: Callable[..., Option]

version = partial(
    Option,
    '-V', '--version',
    dest='version',
    action='store_true',
    help='Show version and exit.',
)  # type: Callable[..., Option]

quiet = partial(
    Option,
    '-q', '--quiet',
    dest='quiet',
    action='count',
    default=0,
    help=(
        'Give less output. Option is additive, and can be used up to 3'
        ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
        ' levels).'
    ),
)  # type: Callable[..., Option]

progress_bar = partial(
    Option,
    '--progress-bar',
    dest='progress_bar',
    type='choice',
    choices=list(BAR_TYPES.keys()),
    default='on',
    help=(
        'Specify type of progress to be displayed [' +
        '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
    ),
)  # type: Callable[..., Option]

log = partial(
    Option,
    "--log", "--log-file", "--local-log",
    dest="log",
    metavar="path",
    help="Path to a verbose appending log."
)  # type: Callable[..., Option]

no_input = partial(
    Option,
    # Don't ask for input
    '--no-input',
    dest='no_input',
    action='store_true',
    default=False,
    help=SUPPRESS_HELP
)  # type: Callable[..., Option]

proxy = partial(
    Option,
    '--proxy',
    dest='proxy',
    type='str',
    default='',
    help="Specify a proxy in the form [user:passwd@]proxy.server:port."
)  # type: Callable[..., Option]

retries = partial(
    Option,
    '--retries',
    dest='retries',
    type='int',
    default=5,
    help="Maximum number of retries each connection should attempt "
         "(default %default times).",
)  # type: Callable[..., Option]

timeout = partial(
    Option,
    '--timeout', '--default-timeout',
    metavar='sec',
    dest='timeout',
    type='float',
    default=15,
    help='Set the socket timeout (default %default seconds).',
)  # type: Callable[..., Option]

skip_requirements_regex = partial(
    Option,
    # A regex to be used to skip requirements
    '--skip-requirements-regex',
    dest='skip_requirements_regex',
    type='str',
    default='',
    help=SUPPRESS_HELP,
)  # type: Callable[..., Option]


def exists_action():
    # type: () -> Option
    return Option(
        # Option when path already exist
        '--exists-action',
        dest='exists_action',
        type='choice',
        choices=['s', 'i', 'w', 'b', 'a'],
        default=[],
        action='append',
        metavar='action',
        help="Default action when a path already exists: "
             "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
    )


cert = partial(
    Option,
    '--cert',
    dest='cert',
    type='str',
    metavar='path',
    help="Path to alternate CA bundle.",
)  # type: Callable[..., Option]

client_cert = partial(
    Option,
    '--client-cert',
    dest='client_cert',
    type='str',
    default=None,
    metavar='path',
    help="Path to SSL client certificate, a single file containing the "
         "private key and the certificate in PEM format.",
)  # type: Callable[..., Option]

index_url = partial(
    Option,
    '-i', '--index-url', '--pypi-url',
    dest='index_url',
    metavar='URL',
    default=PyPI.simple_url,
    help="Base URL of Python Package Index (default %default). "
         "This should point to a repository compliant with PEP 503 "
         "(the simple repository API) or a local directory laid out "
         "in the same format.",
)  # type: Callable[..., Option]


def extra_index_url():
    return Option(
        '--extra-index-url',
        dest='extra_index_urls',
        metavar='URL',
        action='append',
        default=[],
        help="Extra URLs of package indexes to use in addition to "
             "--index-url. Should follow the same rules as "
             "--index-url.",
    )


no_index = partial(
    Option,
    '--no-index',
    dest='no_index',
    action='store_true',
    default=False,
    help='Ignore package index (only looking at --find-links URLs instead).',
)  # type: Callable[..., Option]


def find_links():
    # type: () -> Option
    return Option(
        '-f', '--find-links',
        dest='find_links',
        action='append',
        default=[],
        metavar='url',
        help="If a url or path to an html file, then parse for links to "
             "archives. If a local path or file:// url that's a directory, "
             "then look for archives in the directory listing.",
    )


def trusted_host():
    # type: () -> Option
    return Option(
        "--trusted-host",
        dest="trusted_hosts",
        action="append",
        metavar="HOSTNAME",
        default=[],
        help="Mark this host as trusted, even though it does not have valid "
             "or any HTTPS.",
    )


def constraints():
    # type: () -> Option
    return Option(
        '-c', '--constraint',
        dest='constraints',
        action='append',
        default=[],
        metavar='file',
        help='Constrain versions using the given constraints file. '
             'This option can be used multiple times.'
    )


def requirements():
    # type: () -> Option
    return Option(
        '-r', '--requirement',
        dest='requirements',
        action='append',
        default=[],
        metavar='file',
        help='Install from the given requirements file. '
             'This option can be used multiple times.'
    )


def editable():
    # type: () -> Option
    return Option(
        '-e', '--editable',
        dest='editables',
        action='append',
        default=[],
        metavar='path/url',
        help=('Install a project in editable mode (i.e. setuptools '
              '"develop mode") from a local project path or a VCS url.'),
    )


src = partial(
    Option,
    '--src', '--source', '--source-dir', '--source-directory',
    dest='src_dir',
    metavar='dir',
    default=src_prefix,
    help='Directory to check out editable projects into. '
         'The default in a virtualenv is "<venv path>/src". '
         'The default for global installs is "<current dir>/src".'
)  # type: Callable[..., Option]


def _get_format_control(values, option):
    # type: (Values, Option) -> Any
    """Get a format_control object."""
    return getattr(values, option.dest)


def _handle_no_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value, existing.no_binary, existing.only_binary,
    )


def _handle_only_binary(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value, existing.only_binary, existing.no_binary,
    )


def no_binary():
    # type: () -> Option
    format_control = FormatControl(set(), set())
    return Option(
        "--no-binary", dest="format_control", action="callback",
        callback=_handle_no_binary, type="str",
        default=format_control,
        help="Do not use binary packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all binary packages, :none: to empty the set, or one or "
             "more package names with commas between them. Note that some "
             "packages are tricky to compile and may fail to install when "
             "this option is used on them.",
    )


def only_binary():
    # type: () -> Option
    format_control = FormatControl(set(), set())
    return Option(
        "--only-binary", dest="format_control", action="callback",
        callback=_handle_only_binary, type="str",
        default=format_control,
        help="Do not use source packages. Can be supplied multiple times, and "
             "each time adds to the existing value. Accepts either :all: to "
             "disable all source packages, :none: to empty the set, or one or "
             "more package names with commas between them. Packages without "
             "binary distributions will fail to install when this option is "
             "used on them.",
    )


platform = partial(
    Option,
    '--platform',
    dest='platform',
    metavar='platform',
    default=None,
    help=("Only use wheels compatible with <platform>. "
          "Defaults to the platform of the running system."),
)  # type: Callable[..., Option]


python_version = partial(
    Option,
    '--python-version',
    dest='python_version',
    metavar='python_version',
    default=None,
    help=("Only use wheels compatible with Python "
          "interpreter version <version>. If not specified, then the "
          "current system interpreter minor version is used. A major "
          "version (e.g. '2') can be specified to match all "
          "minor revs of that major version. A minor version "
          "(e.g. '34') can also be specified."),
)  # type: Callable[..., Option]


implementation = partial(
    Option,
    '--implementation',
    dest='implementation',
    metavar='implementation',
    default=None,
    help=("Only use wheels compatible with Python "
          "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
          " or 'ip'. If not specified, then the current "
          "interpreter implementation is used. Use 'py' to force "
          "implementation-agnostic wheels."),
)  # type: Callable[..., Option]


abi = partial(
    Option,
    '--abi',
    dest='abi',
    metavar='abi',
    default=None,
    help=("Only use wheels compatible with Python "
          "abi <abi>, e.g. 'pypy_41'. If not specified, then the "
          "current interpreter abi tag is used. Generally "
          "you will need to specify --implementation, "
          "--platform, and --python-version when using "
          "this option."),
)  # type: Callable[..., Option]


def prefer_binary():
    # type: () -> Option
    return Option(
        "--prefer-binary",
        dest="prefer_binary",
        action="store_true",
        default=False,
        help="Prefer older binary packages over newer source packages."
    )


cache_dir = partial(
    Option,
    "--cache-dir",
    dest="cache_dir",
    default=USER_CACHE_DIR,
    metavar="dir",
    help="Store the cache data in <dir>."
)  # type: Callable[..., Option]


def no_cache_dir_callback(option, opt, value, parser):
    """
    Process a value provided for the --no-cache-dir option.

    This is an optparse.Option callback for the --no-cache-dir option.
    """
    # The value argument will be None if --no-cache-dir is passed via the
    # command-line, since the option doesn't accept arguments.  However,
    # the value can be non-None if the option is triggered e.g. by an
    # environment variable, like PIP_NO_CACHE_DIR=true.
    if value is not None:
        # Then parse the string value to get argument error-checking.
        try:
            strtobool(value)
        except ValueError as exc:
            raise_option_error(parser, option=option, msg=str(exc))

    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
    # rather than enabled (logic would say the latter).  Thus, we disable
    # the cache directory not just on values that parse to True, but (for
    # backwards compatibility reasons) also on values that parse to False.
    # In other words, always set it to False if the option is provided in
    # some (valid) form.
    parser.values.cache_dir = False
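
As the comment explains, any valid boolean value disables the cache, including ones that parse as false; only an unparseable value is rejected. Illustrative invocations (shell syntax; package name is a placeholder):

    PIP_NO_CACHE_DIR=true  pip install SomePackage   # cache disabled
    PIP_NO_CACHE_DIR=false pip install SomePackage   # cache *also* disabled (compat quirk)
    PIP_NO_CACHE_DIR=maybe pip install SomePackage   # option error via strtobool()
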
no_cache = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    action="callback",
    callback=no_cache_dir_callback,
    help="Disable the cache.",
)  # type: Callable[..., Option]

no_deps = partial(
    Option,
    '--no-deps', '--no-dependencies',
    dest='ignore_dependencies',
    action='store_true',
    default=False,
    help="Don't install package dependencies.",
)  # type: Callable[..., Option]

build_dir = partial(
    Option,
    '-b', '--build', '--build-dir', '--build-directory',
    dest='build_dir',
    metavar='dir',
    help='Directory to unpack packages into and build in. Note that '
         'an initial build still takes place in a temporary directory. '
         'The location of temporary directories can be controlled by setting '
         'the TMPDIR environment variable (TEMP on Windows) appropriately. '
         'When passed, build directories are not cleaned in case of failures.'
)  # type: Callable[..., Option]

ignore_requires_python = partial(
    Option,
    '--ignore-requires-python',
    dest='ignore_requires_python',
    action='store_true',
    help='Ignore the Requires-Python information.'
)  # type: Callable[..., Option]

no_build_isolation = partial(
    Option,
    '--no-build-isolation',
    dest='build_isolation',
    action='store_false',
    default=True,
    help='Disable isolation when building a modern source distribution. '
         'Build dependencies specified by PEP 518 must be already installed '
         'if this option is used.'
)  # type: Callable[..., Option]


def no_use_pep517_callback(option, opt, value, parser):
    """
    Process a value provided for the --no-use-pep517 option.

    This is an optparse.Option callback for the no_use_pep517 option.
    """
    # Since --no-use-pep517 doesn't accept arguments, the value argument
    # will be None if --no-use-pep517 is passed via the command-line.
    # However, the value can be non-None if the option is triggered e.g.
    # by an environment variable, for example "PIP_NO_USE_PEP517=true".
    if value is not None:
        msg = """A value was passed for --no-use-pep517,
        probably using either the PIP_NO_USE_PEP517 environment variable
        or the "no-use-pep517" config file option. Use an appropriate value
        of the PIP_USE_PEP517 environment variable or the "use-pep517"
        config file option instead.
        """
        raise_option_error(parser, option=option, msg=msg)

    # Otherwise, --no-use-pep517 was passed via the command-line.
    parser.values.use_pep517 = False


use_pep517 = partial(
    Option,
    '--use-pep517',
    dest='use_pep517',
    action='store_true',
    default=None,
    help='Use PEP 517 for building source distributions '
         '(use --no-use-pep517 to force legacy behaviour).'
)  # type: Any

no_use_pep517 = partial(
    Option,
    '--no-use-pep517',
    dest='use_pep517',
    action='callback',
    callback=no_use_pep517_callback,
    default=None,
    help=SUPPRESS_HELP
)  # type: Any

install_options = partial(
    Option,
    '--install-option',
    dest='install_options',
    action='append',
    metavar='options',
    help="Extra arguments to be supplied to the setup.py install "
         "command (use like --install-option=\"--install-scripts=/usr/local/"
         "bin\"). Use multiple --install-option options to pass multiple "
         "options to setup.py install. If you are using an option with a "
         "directory path, be sure to use absolute path.",
)  # type: Callable[..., Option]

global_options = partial(
    Option,
    '--global-option',
    dest='global_options',
    action='append',
    metavar='options',
    help="Extra global options to be supplied to the setup.py "
         "call before the install command.",
)  # type: Callable[..., Option]

no_clean = partial(
    Option,
    '--no-clean',
    action='store_true',
    default=False,
    help="Don't clean up build directories."
)  # type: Callable[..., Option]

pre = partial(
    Option,
    '--pre',
    action='store_true',
    default=False,
    help="Include pre-release and development versions. By default, "
         "pip only finds stable versions.",
)  # type: Callable[..., Option]

disable_pip_version_check = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    default=False,
    help="Don't periodically check PyPI to determine whether a new version "
         "of pip is available for download. Implied with --no-index.",
)  # type: Callable[..., Option]


# Deprecated, Remove later
always_unzip = partial(
    Option,
    '-Z', '--always-unzip',
    dest='always_unzip',
    action='store_true',
    help=SUPPRESS_HELP,
)  # type: Callable[..., Option]


def _merge_hash(option, opt_str, value, parser):
    # type: (Option, str, str, OptionParser) -> None
    """Given a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name."""
    if not parser.values.hashes:
        parser.values.hashes = {}  # type: ignore
    try:
        algo, digest = value.split(':', 1)
    except ValueError:
        parser.error('Arguments to %s must be a hash name '
                     'followed by a value, like --hash=sha256:abcde...' %
                     opt_str)
    if algo not in STRONG_HASHES:
        parser.error('Allowed hash algorithms for %s are %s.' %
                     (opt_str, ', '.join(STRONG_HASHES)))
    parser.values.hashes.setdefault(algo, []).append(digest)
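
_merge_hash accumulates digests into a dict keyed by algorithm, so a single requirement can carry several acceptable hashes (e.g. one per platform). In a requirements file the option appears on continuation lines; the digests below are placeholders, not real hashes:

    SomePackage==1.0 \
        --hash=sha256:<64-hex-digit-digest> \
        --hash=sha256:<alternate-digest-for-another-platform>
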
hash = partial(
    Option,
    '--hash',
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest='hashes',
    action='callback',
    callback=_merge_hash,
    type='string',
    help="Verify that the package's archive matches this "
         'hash before installing. Example: --hash=sha256:abcdef...',
)  # type: Callable[..., Option]


require_hashes = partial(
    Option,
    '--require-hashes',
    dest='require_hashes',
    action='store_true',
    default=False,
    help='Require a hash to check each requirement against, for '
         'repeatable installs. This option is implied when any package in a '
         'requirements file has a --hash option.',
)  # type: Callable[..., Option]


##########
# groups #
##########

general_group = {
    'name': 'General Options',
    'options': [
        help_,
        isolated_mode,
        require_virtualenv,
        verbose,
        version,
        quiet,
        log,
        no_input,
        proxy,
        retries,
        timeout,
        skip_requirements_regex,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
        no_color,
    ]
}  # type: Dict[str, Any]

index_group = {
    'name': 'Package Index Options',
    'options': [
        index_url,
        extra_index_url,
        no_index,
        find_links,
    ]
}  # type: Dict[str, Any]
@@ -1,104 +0,0 @@
"""A single place for constructing and exposing the main parser
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from pip import __version__
|
|
||||||
from pip._internal.cli import cmdoptions
|
|
||||||
from pip._internal.cli.parser import (
|
|
||||||
ConfigOptionParser, UpdatingDefaultsHelpFormatter,
|
|
||||||
)
|
|
||||||
from pip._internal.commands import (
|
|
||||||
commands_dict, get_similar_commands, get_summaries,
|
|
||||||
)
|
|
||||||
from pip._internal.exceptions import CommandError
|
|
||||||
from pip._internal.utils.misc import get_prog
|
|
||||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
|
||||||
|
|
||||||
if MYPY_CHECK_RUNNING:
|
|
||||||
from typing import Tuple, List # noqa: F401
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["create_main_parser", "parse_command"]
|
|
||||||
|
|
||||||
|
|
||||||
def create_main_parser():
|
|
||||||
# type: () -> ConfigOptionParser
|
|
||||||
"""Creates and returns the main parser for pip's CLI
|
|
||||||
"""
|
|
||||||
|
|
||||||
parser_kw = {
|
|
||||||
'usage': '\n%prog <command> [options]',
|
|
||||||
'add_help_option': False,
|
|
||||||
'formatter': UpdatingDefaultsHelpFormatter(),
|
|
||||||
'name': 'global',
|
|
||||||
'prog': get_prog(),
|
|
||||||
}
|
|
||||||
|
|
||||||
parser = ConfigOptionParser(**parser_kw)
|
|
||||||
parser.disable_interspersed_args()
|
|
||||||
|
|
||||||
pip_pkg_dir = os.path.abspath(os.path.join(
|
|
||||||
os.path.dirname(__file__), "..", "..",
|
|
||||||
))
|
|
||||||
parser.version = 'pip %s from %s (python %s)' % (
|
|
||||||
__version__, pip_pkg_dir, sys.version[:3],
|
|
||||||
)
|
|
||||||
|
|
||||||
# add the general options
|
|
||||||
gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
|
|
||||||
parser.add_option_group(gen_opts)
|
|
||||||
|
|
||||||
# so the help formatter knows
|
|
||||||
parser.main = True # type: ignore
|
|
||||||
|
|
||||||
# create command listing for description
|
|
||||||
command_summaries = get_summaries()
|
|
||||||
description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
|
|
||||||
parser.description = '\n'.join(description)
|
|
||||||
|
|
||||||
return parser
|
|
||||||
|
|
||||||
|
|
||||||
def parse_command(args):
|
|
||||||
# type: (List[str]) -> Tuple[str, List[str]]
|
|
||||||
parser = create_main_parser()
|
|
||||||
|
|
||||||
# Note: parser calls disable_interspersed_args(), so the result of this
|
|
||||||
# call is to split the initial args into the general options before the
|
|
||||||
# subcommand and everything else.
|
|
||||||
# For example:
|
|
||||||
# args: ['--timeout=5', 'install', '--user', 'INITools']
|
|
||||||
# general_options: ['--timeout==5']
|
|
||||||
# args_else: ['install', '--user', 'INITools']
|
|
||||||
general_options, args_else = parser.parse_args(args)
|
|
||||||
|
|
||||||
# --version
|
|
||||||
if general_options.version:
|
|
||||||
sys.stdout.write(parser.version) # type: ignore
|
|
||||||
sys.stdout.write(os.linesep)
|
|
||||||
sys.exit()
|
|
||||||
|
|
||||||
# pip || pip help -> print_help()
|
|
||||||
if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
|
|
||||||
parser.print_help()
|
|
||||||
sys.exit()
|
|
||||||
|
|
||||||
# the subcommand name
|
|
||||||
cmd_name = args_else[0]
|
|
||||||
|
|
||||||
if cmd_name not in commands_dict:
|
|
||||||
guess = get_similar_commands(cmd_name)
|
|
||||||
|
|
||||||
msg = ['unknown command "%s"' % cmd_name]
|
|
||||||
if guess:
|
|
||||||
msg.append('maybe you meant "%s"' % guess)
|
|
||||||
|
|
||||||
raise CommandError(' - '.join(msg))
|
|
||||||
|
|
||||||
# all the args without the subcommand
|
|
||||||
cmd_args = args[:]
|
|
||||||
cmd_args.remove(cmd_name)
|
|
||||||
|
|
||||||
return cmd_name, cmd_args
|
|
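
Putting the two functions together, parse_command splits the general options from the subcommand and its arguments. A hedged sketch of the behaviour described in the comment above — the module path assumes this file lives at pip/_internal/cli/main_parser.py, as its imports suggest, and running it requires pip itself:

    from pip._internal.cli.main_parser import parse_command

    cmd_name, cmd_args = parse_command(
        ['--timeout=5', 'install', '--user', 'INITools']
    )
    assert cmd_name == 'install'
    # The subcommand is removed; everything else is kept for the command parser:
    assert cmd_args == ['--timeout=5', '--user', 'INITools']
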
@@ -1,261 +0,0 @@
"""Base option parser setup"""
|
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import optparse
|
|
||||||
import sys
|
|
||||||
import textwrap
|
|
||||||
from distutils.util import strtobool
|
|
||||||
|
|
||||||
from pip._vendor.six import string_types
|
|
||||||
|
|
||||||
from pip._internal.cli.status_codes import UNKNOWN_ERROR
|
|
||||||
from pip._internal.configuration import Configuration, ConfigurationError
|
|
||||||
from pip._internal.utils.compat import get_terminal_size
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
|
|
||||||
"""A prettier/less verbose help formatter for optparse."""
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
# help position must be aligned with __init__.parseopts.description
|
|
||||||
kwargs['max_help_position'] = 30
|
|
||||||
kwargs['indent_increment'] = 1
|
|
||||||
kwargs['width'] = get_terminal_size()[0] - 2
|
|
||||||
optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)
|
|
||||||
|
|
||||||
def format_option_strings(self, option):
|
|
||||||
return self._format_option_strings(option, ' <%s>', ', ')
|
|
||||||
|
|
||||||
def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
|
|
||||||
"""
|
|
||||||
Return a comma-separated list of option strings and metavars.
|
|
||||||
|
|
||||||
:param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
|
|
||||||
:param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
|
|
||||||
:param optsep: separator
|
|
||||||
"""
|
|
||||||
opts = []
|
|
||||||
|
|
||||||
if option._short_opts:
|
|
||||||
opts.append(option._short_opts[0])
|
|
||||||
if option._long_opts:
|
|
||||||
opts.append(option._long_opts[0])
|
|
||||||
if len(opts) > 1:
|
|
||||||
opts.insert(1, optsep)
|
|
||||||
|
|
||||||
if option.takes_value():
|
|
||||||
metavar = option.metavar or option.dest.lower()
|
|
||||||
opts.append(mvarfmt % metavar.lower())
|
|
||||||
|
|
||||||
return ''.join(opts)
|
|
||||||
|
|
||||||
def format_heading(self, heading):
|
|
||||||
if heading == 'Options':
|
|
||||||
return ''
|
|
||||||
return heading + ':\n'
|
|
||||||
|
|
||||||
def format_usage(self, usage):
|
|
||||||
"""
|
|
||||||
Ensure there is only one newline between usage and the first heading
|
|
||||||
if there is no description.
|
|
||||||
"""
|
|
||||||
msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ")
|
|
||||||
return msg
|
|
||||||
|
|
||||||
def format_description(self, description):
|
|
||||||
# leave full control over description to us
|
|
||||||
if description:
|
|
||||||
if hasattr(self.parser, 'main'):
|
|
||||||
label = 'Commands'
|
|
||||||
else:
|
|
||||||
label = 'Description'
|
|
||||||
# some doc strings have initial newlines, some don't
|
|
||||||
description = description.lstrip('\n')
|
|
||||||
# some doc strings have final newlines and spaces, some don't
|
|
||||||
description = description.rstrip()
|
|
||||||
# dedent, then reindent
|
|
||||||
description = self.indent_lines(textwrap.dedent(description), " ")
|
|
||||||
description = '%s:\n%s\n' % (label, description)
|
|
||||||
return description
|
|
||||||
else:
|
|
||||||
return ''
|
|
||||||
|
|
||||||
def format_epilog(self, epilog):
|
|
||||||
# leave full control over epilog to us
|
|
||||||
if epilog:
|
|
||||||
return epilog
|
|
||||||
else:
|
|
||||||
return ''
|
|
||||||
|
|
||||||
def indent_lines(self, text, indent):
|
|
||||||
new_lines = [indent + line for line in text.split('\n')]
|
|
||||||
return "\n".join(new_lines)
|
|
||||||
|
|
||||||
|
|
||||||
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
|
|
||||||
"""Custom help formatter for use in ConfigOptionParser.
|
|
||||||
|
|
||||||
This is updates the defaults before expanding them, allowing
|
|
||||||
them to show up correctly in the help listing.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def expand_default(self, option):
|
|
||||||
if self.parser is not None:
|
|
||||||
self.parser._update_defaults(self.parser.defaults)
|
|
||||||
return optparse.IndentedHelpFormatter.expand_default(self, option)
|
|
||||||
|
|
||||||
|
|
||||||
class CustomOptionParser(optparse.OptionParser):
|
|
||||||
|
|
||||||
def insert_option_group(self, idx, *args, **kwargs):
|
|
||||||
"""Insert an OptionGroup at a given position."""
|
|
||||||
group = self.add_option_group(*args, **kwargs)
|
|
||||||
|
|
||||||
self.option_groups.pop()
|
|
||||||
self.option_groups.insert(idx, group)
|
|
||||||
|
|
||||||
return group
|
|
||||||
|
|
||||||
@property
|
|
||||||
def option_list_all(self):
|
|
||||||
"""Get a list of all options, including those in option groups."""
|
|
||||||
res = self.option_list[:]
|
|
||||||
for i in self.option_groups:
|
|
||||||
res.extend(i.option_list)
|
|
||||||
|
|
||||||
return res
|
|
||||||
|
|
||||||
|
|
||||||
class ConfigOptionParser(CustomOptionParser):
|
|
||||||
"""Custom option parser which updates its defaults by checking the
|
|
||||||
configuration files and environmental variables"""
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
self.name = kwargs.pop('name')
|
|
||||||
|
|
||||||
isolated = kwargs.pop("isolated", False)
|
|
||||||
self.config = Configuration(isolated)
|
|
||||||
|
|
||||||
assert self.name
|
|
||||||
optparse.OptionParser.__init__(self, *args, **kwargs)
|
|
||||||
|
|
||||||
def check_default(self, option, key, val):
|
|
||||||
try:
|
|
||||||
return option.check_value(key, val)
|
|
||||||
except optparse.OptionValueError as exc:
|
|
||||||
print("An error occurred during configuration: %s" % exc)
|
|
||||||
sys.exit(3)
|
|
||||||
|
|
||||||
def _get_ordered_configuration_items(self):
|
|
||||||
# Configuration gives keys in an unordered manner. Order them.
|
|
||||||
override_order = ["global", self.name, ":env:"]
|
|
||||||
|
|
||||||
# Pool the options into different groups
|
|
||||||
section_items = {name: [] for name in override_order}
|
|
||||||
for section_key, val in self.config.items():
|
|
||||||
# ignore empty values
|
|
||||||
if not val:
|
|
||||||
logger.debug(
|
|
||||||
"Ignoring configuration key '%s' as it's value is empty.",
|
|
||||||
section_key
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
section, key = section_key.split(".", 1)
|
|
||||||
if section in override_order:
|
|
||||||
section_items[section].append((key, val))
|
|
||||||
|
|
||||||
# Yield each group in their override order
|
|
||||||
for section in override_order:
|
|
||||||
for key, val in section_items[section]:
|
|
||||||
yield key, val
|
|
||||||
|
|
||||||
def _update_defaults(self, defaults):
|
|
||||||
"""Updates the given defaults with values from the config files and
|
|
||||||
the environ. Does a little special handling for certain types of
|
|
||||||
options (lists)."""
|
|
||||||
|
|
||||||
# Accumulate complex default state.
|
|
||||||
self.values = optparse.Values(self.defaults)
|
|
||||||
late_eval = set()
|
|
||||||
# Then set the options with those values
|
|
||||||
for key, val in self._get_ordered_configuration_items():
|
|
||||||
# '--' because configuration supports only long names
|
|
||||||
option = self.get_option('--' + key)
|
|
||||||
|
|
||||||
# Ignore options not present in this parser. E.g. non-globals put
|
|
||||||
# in [global] by users that want them to apply to all applicable
|
|
||||||
# commands.
|
|
||||||
if option is None:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if option.action in ('store_true', 'store_false', 'count'):
|
|
||||||
try:
|
|
||||||
val = strtobool(val)
|
|
||||||
except ValueError:
|
|
||||||
error_msg = invalid_config_error_message(
|
|
||||||
option.action, key, val
|
|
||||||
)
|
|
||||||
self.error(error_msg)
|
|
||||||
|
|
||||||
elif option.action == 'append':
|
|
||||||
val = val.split()
|
|
||||||
val = [self.check_default(option, key, v) for v in val]
|
|
||||||
elif option.action == 'callback':
|
|
||||||
late_eval.add(option.dest)
|
|
||||||
opt_str = option.get_opt_string()
|
|
||||||
val = option.convert_value(opt_str, val)
|
|
||||||
# From take_action
|
|
||||||
args = option.callback_args or ()
|
|
||||||
kwargs = option.callback_kwargs or {}
|
|
||||||
option.callback(option, opt_str, val, self, *args, **kwargs)
|
|
||||||
else:
|
|
||||||
val = self.check_default(option, key, val)
|
|
||||||
|
|
||||||
defaults[option.dest] = val
|
|
||||||
|
|
||||||
for key in late_eval:
|
|
||||||
defaults[key] = getattr(self.values, key)
|
|
||||||
self.values = None
|
|
||||||
return defaults
|
|
||||||
|
|
||||||
def get_default_values(self):
|
|
||||||
"""Overriding to make updating the defaults after instantiation of
|
|
||||||
the option parser possible, _update_defaults() does the dirty work."""
|
|
||||||
if not self.process_default_values:
|
|
||||||
# Old, pre-Optik 1.5 behaviour.
|
|
||||||
return optparse.Values(self.defaults)
|
|
||||||
|
|
||||||
# Load the configuration, or error out in case of an error
|
|
||||||
try:
|
|
||||||
self.config.load()
|
|
||||||
except ConfigurationError as err:
|
|
||||||
self.exit(UNKNOWN_ERROR, str(err))
|
|
||||||
|
|
||||||
defaults = self._update_defaults(self.defaults.copy()) # ours
|
|
||||||
for option in self._get_all_options():
|
|
||||||
default = defaults.get(option.dest)
|
|
||||||
if isinstance(default, string_types):
|
|
||||||
opt_str = option.get_opt_string()
|
|
||||||
defaults[option.dest] = option.check_value(opt_str, default)
|
|
||||||
return optparse.Values(defaults)
|
|
||||||
|
|
||||||
def error(self, msg):
|
|
||||||
self.print_usage(sys.stderr)
|
|
||||||
self.exit(UNKNOWN_ERROR, "%s\n" % msg)
|
|
||||||
|
|
||||||
|
|
||||||
def invalid_config_error_message(action, key, val):
|
|
||||||
"""Returns a better error message when invalid configuration option
|
|
||||||
is provided."""
|
|
||||||
if action in ('store_true', 'store_false'):
|
|
||||||
return ("{0} is not a valid value for {1} option, "
|
|
||||||
"please specify a boolean value like yes/no, "
|
|
||||||
"true/false or 1/0 instead.").format(val, key)
|
|
||||||
|
|
||||||
return ("{0} is not a valid value for {1} option, "
|
|
||||||
"please specify a numerical value like 1/0 "
|
|
||||||
"instead.").format(val, key)
|
|
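
_get_ordered_configuration_items replays values in override order — [global], then the command's own section, then :env: (PIP_* environment variables) — so later sources win when _update_defaults applies them. For example, given this sketch of a pip.conf (file locations vary by platform):

    [global]
    timeout = 60

    [install]
    timeout = 30

`pip install` would see timeout = 30, while setting PIP_TIMEOUT=10 in the environment would override both, since the :env: section is applied last.
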
@@ -1,8 +0,0 @@
from __future__ import absolute_import

SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23
@@ -1,79 +0,0 @@
"""
|
|
||||||
Package containing all pip commands
|
|
||||||
"""
|
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
from pip._internal.commands.completion import CompletionCommand
|
|
||||||
from pip._internal.commands.configuration import ConfigurationCommand
|
|
||||||
from pip._internal.commands.download import DownloadCommand
|
|
||||||
from pip._internal.commands.freeze import FreezeCommand
|
|
||||||
from pip._internal.commands.hash import HashCommand
|
|
||||||
from pip._internal.commands.help import HelpCommand
|
|
||||||
from pip._internal.commands.list import ListCommand
|
|
||||||
from pip._internal.commands.check import CheckCommand
|
|
||||||
from pip._internal.commands.search import SearchCommand
|
|
||||||
from pip._internal.commands.show import ShowCommand
|
|
||||||
from pip._internal.commands.install import InstallCommand
|
|
||||||
from pip._internal.commands.uninstall import UninstallCommand
|
|
||||||
from pip._internal.commands.wheel import WheelCommand
|
|
||||||
|
|
||||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
|
||||||
|
|
||||||
if MYPY_CHECK_RUNNING:
|
|
||||||
from typing import List, Type # noqa: F401
|
|
||||||
from pip._internal.cli.base_command import Command # noqa: F401
|
|
||||||
|
|
||||||
commands_order = [
|
|
||||||
InstallCommand,
|
|
||||||
DownloadCommand,
|
|
||||||
UninstallCommand,
|
|
||||||
FreezeCommand,
|
|
||||||
ListCommand,
|
|
||||||
ShowCommand,
|
|
||||||
CheckCommand,
|
|
||||||
ConfigurationCommand,
|
|
||||||
SearchCommand,
|
|
||||||
WheelCommand,
|
|
||||||
HashCommand,
|
|
||||||
CompletionCommand,
|
|
||||||
HelpCommand,
|
|
||||||
] # type: List[Type[Command]]
|
|
||||||
|
|
||||||
commands_dict = {c.name: c for c in commands_order}
|
|
||||||
|
|
||||||
|
|
||||||
def get_summaries(ordered=True):
|
|
||||||
"""Yields sorted (command name, command summary) tuples."""
|
|
||||||
|
|
||||||
if ordered:
|
|
||||||
cmditems = _sort_commands(commands_dict, commands_order)
|
|
||||||
else:
|
|
||||||
cmditems = commands_dict.items()
|
|
||||||
|
|
||||||
for name, command_class in cmditems:
|
|
||||||
yield (name, command_class.summary)
|
|
||||||
|
|
||||||
|
|
||||||
def get_similar_commands(name):
|
|
||||||
"""Command name auto-correct."""
|
|
||||||
from difflib import get_close_matches
|
|
||||||
|
|
||||||
name = name.lower()
|
|
||||||
|
|
||||||
close_commands = get_close_matches(name, commands_dict.keys())
|
|
||||||
|
|
||||||
if close_commands:
|
|
||||||
return close_commands[0]
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def _sort_commands(cmddict, order):
|
|
||||||
def keyfn(key):
|
|
||||||
try:
|
|
||||||
return order.index(key[1])
|
|
||||||
except ValueError:
|
|
||||||
# unordered items should come last
|
|
||||||
return 0xff
|
|
||||||
|
|
||||||
return sorted(cmddict.items(), key=keyfn)
|
|
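
get_similar_commands is a thin wrapper over difflib; the main parser (earlier in this diff) turns its result into the `unknown command "X" - maybe you meant "Y"` error. A self-contained sketch:

    from difflib import get_close_matches

    commands = ['install', 'download', 'uninstall', 'freeze', 'list', 'show']
    print(get_close_matches('instal', commands)[0])  # install
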
@@ -1,41 +0,0 @@
import logging

from pip._internal.cli.base_command import Command
from pip._internal.operations.check import (
    check_package_set, create_package_set_from_installed,
)

logger = logging.getLogger(__name__)


class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""
    name = 'check'
    usage = """
      %prog [options]"""
    summary = 'Verify installed packages have compatible dependencies.'

    def run(self, options, args):
        package_set, parsing_probs = create_package_set_from_installed()
        missing, conflicting = check_package_set(package_set)

        for project_name in missing:
            version = package_set[project_name].version
            for dependency in missing[project_name]:
                logger.info(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[0],
                )

        for project_name in conflicting:
            version = package_set[project_name].version
            for dep_name, dep_version, req in conflicting[project_name]:
                logger.info(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name, version, req, dep_name, dep_version,
                )

        if missing or conflicting or parsing_probs:
            return 1
        else:
            logger.info("No broken requirements found.")
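
Given the message formats above, a run against a broken environment would look roughly like this (package names are illustrative), with exit status 1 whenever anything is missing, conflicting, or unparseable:

    $ pip check
    somepackage 1.0 requires missingdep, which is not installed.
    somepackage 1.0 has requirement otherdep>=2.0, but you have otherdep 1.4.
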
@@ -1,94 +0,0 @@
from __future__ import absolute_import

import sys
import textwrap

from pip._internal.cli.base_command import Command
from pip._internal.utils.misc import get_prog

BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
"""

COMPLETION_SCRIPTS = {
    'bash': """
        _pip_completion()
        {
            COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
                           COMP_CWORD=$COMP_CWORD \\
                           PIP_AUTO_COMPLETE=1 $1 ) )
        }
        complete -o default -F _pip_completion %(prog)s
    """,
    'zsh': """
        function _pip_completion {
          local words cword
          read -Ac words
          read -cn cword
          reply=( $( COMP_WORDS="$words[*]" \\
                     COMP_CWORD=$(( cword-1 )) \\
                     PIP_AUTO_COMPLETE=1 $words[1] ) )
        }
        compctl -K _pip_completion %(prog)s
    """,
    'fish': """
        function __fish_complete_pip
            set -lx COMP_WORDS (commandline -o) ""
            set -lx COMP_CWORD ( \\
                math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
            )
            set -lx PIP_AUTO_COMPLETE 1
            string split \\  -- (eval $COMP_WORDS[1])
        end
        complete -fa "(__fish_complete_pip)" -c %(prog)s
    """,
}


class CompletionCommand(Command):
    """A helper command to be used for command completion."""
    name = 'completion'
    summary = 'A helper command used for command completion.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(CompletionCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '--bash', '-b',
            action='store_const',
            const='bash',
            dest='shell',
            help='Emit completion code for bash')
        cmd_opts.add_option(
            '--zsh', '-z',
            action='store_const',
            const='zsh',
            dest='shell',
            help='Emit completion code for zsh')
        cmd_opts.add_option(
            '--fish', '-f',
            action='store_const',
            const='fish',
            dest='shell',
            help='Emit completion code for fish')

        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ['--' + shell for shell in sorted(shells)]
        if options.shell in shells:
            script = textwrap.dedent(
                COMPLETION_SCRIPTS.get(options.shell, '') % {
                    'prog': get_prog(),
                }
            )
            print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
        else:
            sys.stderr.write(
                'ERROR: You must pass %s\n' % ' or '.join(shell_options)
            )
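
The emitted script is meant to be evaluated by the user's shell; the usual pattern (per pip's documentation of this era) is to write it into the shell's startup or completions file, for example:

    pip completion --bash >> ~/.bashrc
    pip completion --zsh  >> ~/.zshrc
    pip completion --fish > ~/.config/fish/completions/pip.fish
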
@@ -1,227 +0,0 @@
import logging
import os
import subprocess

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.configuration import Configuration, kinds
from pip._internal.exceptions import PipError
from pip._internal.locations import venv_config_file
from pip._internal.utils.misc import get_prog

logger = logging.getLogger(__name__)


class ConfigurationCommand(Command):
    """Manage local and global configuration.

    Subcommands:

    list: List the active configuration (or from the file specified)
    edit: Edit the configuration file in an editor
    get: Get the value associated with name
    set: Set the name=value
    unset: Unset the value associated with name

    If none of --user, --global and --venv are passed, a virtual
    environment configuration file is used if one is active and the file
    exists. Otherwise, all modifications happen to the user file by
    default.
    """

    name = 'config'
    usage = """
        %prog [<file-option>] list
        %prog [<file-option>] [--editor <editor-path>] edit

        %prog [<file-option>] get name
        %prog [<file-option>] set name value
        %prog [<file-option>] unset name
    """

    summary = "Manage local and global configuration."

    def __init__(self, *args, **kwargs):
        super(ConfigurationCommand, self).__init__(*args, **kwargs)

        self.configuration = None

        self.cmd_opts.add_option(
            '--editor',
            dest='editor',
            action='store',
            default=None,
            help=(
                'Editor to use to edit the file. Uses VISUAL or EDITOR '
                'environment variables if not provided.'
            )
        )

        self.cmd_opts.add_option(
            '--global',
            dest='global_file',
            action='store_true',
            default=False,
            help='Use the system-wide configuration file only'
        )

        self.cmd_opts.add_option(
            '--user',
            dest='user_file',
            action='store_true',
            default=False,
            help='Use the user configuration file only'
        )

        self.cmd_opts.add_option(
            '--venv',
            dest='venv_file',
            action='store_true',
            default=False,
            help='Use the virtualenv configuration file only'
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        handlers = {
            "list": self.list_values,
            "edit": self.open_in_editor,
            "get": self.get_name,
            "set": self.set_name_value,
            "unset": self.unset_name
        }

        # Determine action
        if not args or args[0] not in handlers:
            logger.error("Need an action ({}) to perform.".format(
                ", ".join(sorted(handlers)))
            )
            return ERROR

        action = args[0]

        # Determine which configuration files are to be loaded
        #    Depends on whether the command is modifying.
        try:
            load_only = self._determine_file(
                options, need_value=(action in ["get", "set", "unset", "edit"])
            )
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        # Load a new configuration
        self.configuration = Configuration(
            isolated=options.isolated_mode, load_only=load_only
        )
        self.configuration.load()

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _determine_file(self, options, need_value):
        file_options = {
            kinds.USER: options.user_file,
            kinds.GLOBAL: options.global_file,
            kinds.VENV: options.venv_file
        }

        if sum(file_options.values()) == 0:
            if not need_value:
                return None
            # Default to user, unless there's a virtualenv file.
|
|
||||||
elif os.path.exists(venv_config_file):
|
|
||||||
return kinds.VENV
|
|
||||||
else:
|
|
||||||
return kinds.USER
|
|
||||||
elif sum(file_options.values()) == 1:
|
|
||||||
# There's probably a better expression for this.
|
|
||||||
return [key for key in file_options if file_options[key]][0]
|
|
||||||
|
|
||||||
raise PipError(
|
|
||||||
"Need exactly one file to operate upon "
|
|
||||||
"(--user, --venv, --global) to perform."
|
|
||||||
)
|
|
||||||
|
|
||||||
def list_values(self, options, args):
|
|
||||||
self._get_n_args(args, "list", n=0)
|
|
||||||
|
|
||||||
for key, value in sorted(self.configuration.items()):
|
|
||||||
logger.info("%s=%r", key, value)
|
|
||||||
|
|
||||||
def get_name(self, options, args):
|
|
||||||
key = self._get_n_args(args, "get [name]", n=1)
|
|
||||||
value = self.configuration.get_value(key)
|
|
||||||
|
|
||||||
logger.info("%s", value)
|
|
||||||
|
|
||||||
def set_name_value(self, options, args):
|
|
||||||
key, value = self._get_n_args(args, "set [name] [value]", n=2)
|
|
||||||
self.configuration.set_value(key, value)
|
|
||||||
|
|
||||||
self._save_configuration()
|
|
||||||
|
|
||||||
def unset_name(self, options, args):
|
|
||||||
key = self._get_n_args(args, "unset [name]", n=1)
|
|
||||||
self.configuration.unset_value(key)
|
|
||||||
|
|
||||||
self._save_configuration()
|
|
||||||
|
|
||||||
def open_in_editor(self, options, args):
|
|
||||||
editor = self._determine_editor(options)
|
|
||||||
|
|
||||||
fname = self.configuration.get_file_to_edit()
|
|
||||||
if fname is None:
|
|
||||||
raise PipError("Could not determine appropriate file.")
|
|
||||||
|
|
||||||
try:
|
|
||||||
subprocess.check_call([editor, fname])
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
raise PipError(
|
|
||||||
"Editor Subprocess exited with exit code {}"
|
|
||||||
.format(e.returncode)
|
|
||||||
)
|
|
||||||
|
|
||||||
def _get_n_args(self, args, example, n):
|
|
||||||
"""Helper to make sure the command got the right number of arguments
|
|
||||||
"""
|
|
||||||
if len(args) != n:
|
|
||||||
msg = (
|
|
||||||
'Got unexpected number of arguments, expected {}. '
|
|
||||||
'(example: "{} config {}")'
|
|
||||||
).format(n, get_prog(), example)
|
|
||||||
raise PipError(msg)
|
|
||||||
|
|
||||||
if n == 1:
|
|
||||||
return args[0]
|
|
||||||
else:
|
|
||||||
return args
|
|
||||||
|
|
||||||
def _save_configuration(self):
|
|
||||||
# We successfully ran a modifying command. Need to save the
|
|
||||||
# configuration.
|
|
||||||
try:
|
|
||||||
self.configuration.save()
|
|
||||||
except Exception:
|
|
||||||
logger.error(
|
|
||||||
"Unable to save configuration. Please report this as a bug.",
|
|
||||||
exc_info=1
|
|
||||||
)
|
|
||||||
raise PipError("Internal Error.")
|
|
||||||
|
|
||||||
def _determine_editor(self, options):
|
|
||||||
if options.editor is not None:
|
|
||||||
return options.editor
|
|
||||||
elif "VISUAL" in os.environ:
|
|
||||||
return os.environ["VISUAL"]
|
|
||||||
elif "EDITOR" in os.environ:
|
|
||||||
return os.environ["EDITOR"]
|
|
||||||
else:
|
|
||||||
raise PipError("Could not determine editor to use.")
|
|
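The run() method above is a straight table dispatch: the first positional argument selects a handler and the rest are passed through. A hypothetical stand-alone sketch of that pattern (handler names reused for readability; the real handlers also receive the parsed options):

# Simplified dispatch in the spirit of ConfigurationCommand.run; the
# handlers here are stubs, not pip's implementations.
def list_values(args):
    print('list', args)

def set_name_value(args):
    name, value = args
    print('set %s=%s' % (name, value))

handlers = {'list': list_values, 'set': set_name_value}

def run(args):
    if not args or args[0] not in handlers:
        print("Need an action (%s) to perform." % ", ".join(sorted(handlers)))
        return 1
    handlers[args[0]](args[1:])
    return 0

run(['set', 'global.timeout', '60'])  # -> set global.timeout=60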
@ -1,176 +0,0 @@
from __future__ import absolute_import

import logging
import os

from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import RequirementCommand
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.resolve import Resolver
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.misc import ensure_dir, normalize_path
from pip._internal.utils.temp_dir import TempDirectory

logger = logging.getLogger(__name__)


class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """
    name = 'download'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Download packages.'

    def __init__(self, *args, **kw):
        super(DownloadCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.global_options())
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.prefer_binary())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.pre())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.use_pep517())
        cmd_opts.add_option(cmdoptions.no_use_pep517())

        cmd_opts.add_option(
            '-d', '--dest', '--destination-dir', '--destination-directory',
            dest='download_dir',
            metavar='dir',
            default=os.curdir,
            help=("Download packages into <dir>."),
        )

        cmd_opts.add_option(cmdoptions.platform())
        cmd_opts.add_option(cmdoptions.python_version())
        cmd_opts.add_option(cmdoptions.implementation())
        cmd_opts.add_option(cmdoptions.abi())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        if options.python_version:
            python_versions = [options.python_version]
        else:
            python_versions = None

        cmdoptions.check_dist_restriction(options)

        options.src_dir = os.path.abspath(options.src_dir)
        options.download_dir = normalize_path(options.download_dir)

        ensure_dir(options.download_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                platform=options.platform,
                python_versions=python_versions,
                abi=options.abi,
                implementation=options.implementation,
            )
            build_delete = (not (options.no_clean or options.build_dir))
            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. Check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with RequirementTracker() as req_tracker, TempDirectory(
                options.build_dir, delete=build_delete, kind="download"
            ) as directory:

                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )
                self.populate_requirement_set(
                    requirement_set,
                    args,
                    options,
                    finder,
                    session,
                    self.name,
                    None
                )

                preparer = RequirementPreparer(
                    build_dir=directory.path,
                    src_dir=options.src_dir,
                    download_dir=options.download_dir,
                    wheel_download_dir=None,
                    progress_bar=options.progress_bar,
                    build_isolation=options.build_isolation,
                    req_tracker=req_tracker,
                )

                resolver = Resolver(
                    preparer=preparer,
                    finder=finder,
                    session=session,
                    wheel_cache=None,
                    use_user_site=False,
                    upgrade_strategy="to-satisfy-only",
                    force_reinstall=False,
                    ignore_dependencies=options.ignore_dependencies,
                    ignore_requires_python=False,
                    ignore_installed=True,
                    isolated=options.isolated_mode,
                )
                resolver.resolve(requirement_set)

                downloaded = ' '.join([
                    req.name for req in requirement_set.successfully_downloaded
                ])
                if downloaded:
                    logger.info('Successfully downloaded %s', downloaded)

                # Clean up
                if not options.no_clean:
                    requirement_set.cleanup_files()

        return requirement_set
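run() normalizes the destination before anything is fetched: normalize_path resolves the user-supplied directory and ensure_dir creates it if missing. Both are pip-internal helpers; the stand-ins below sketch their assumed semantics and are not pip's own code:

# Assumed semantics of the two path helpers used in DownloadCommand.run.
import errno
import os

def normalize_path(path):
    # Expand ~, resolve symlinks, and normalize case to an absolute path.
    return os.path.normcase(os.path.realpath(os.path.expanduser(path)))

def ensure_dir(path):
    # mkdir -p behaviour: ignore the error only if the directory exists.
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise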
@ -1,96 +0,0 @@
from __future__ import absolute_import

import sys

from pip._internal.cache import WheelCache
from pip._internal.cli.base_command import Command
from pip._internal.models.format_control import FormatControl
from pip._internal.operations.freeze import freeze
from pip._internal.utils.compat import stdlib_pkgs

DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}


class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'freeze'
    usage = """
      %prog [options]"""
    summary = 'Output installed packages in requirements format.'
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def __init__(self, *args, **kw):
        super(FreezeCommand, self).__init__(*args, **kw)

        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help="Use the order in the given requirements file and its "
                 "comments when generating output. This option can be "
                 "used multiple times.")
        self.cmd_opts.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the '
                 'output.')
        self.cmd_opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not output '
                 'globally-installed packages.')
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        self.cmd_opts.add_option(
            '--all',
            dest='freeze_all',
            action='store_true',
            help='Do not skip these packages in the output:'
                 ' %s' % ', '.join(DEV_PKGS))
        self.cmd_opts.add_option(
            '--exclude-editable',
            dest='exclude_editable',
            action='store_true',
            help='Exclude editable packages from output.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        format_control = FormatControl(set(), set())
        wheel_cache = WheelCache(options.cache_dir, format_control)
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(DEV_PKGS)

        freeze_kwargs = dict(
            requirement=options.requirements,
            find_links=options.find_links,
            local_only=options.local,
            user_only=options.user,
            skip_regex=options.skip_requirements_regex,
            isolated=options.isolated_mode,
            wheel_cache=wheel_cache,
            skip=skip,
            exclude_editable=options.exclude_editable,
        )

        try:
            for line in freeze(**freeze_kwargs):
                sys.stdout.write(line + '\n')
        finally:
            wheel_cache.cleanup()
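The skip set built at the top of run() is what makes `pip freeze` hide pip's own bootstrap packages unless --all is given. A tiny sketch of that logic; the stdlib_pkgs value here is an illustrative stand-in, not pip's actual constant:

# Sketch of the skip-set construction in FreezeCommand.run.
stdlib_pkgs = {'python', 'wsgiref', 'argparse'}  # illustrative stand-in
DEV_PKGS = {'pip', 'setuptools', 'distribute', 'wheel'}

freeze_all = False  # i.e. --all was not passed
skip = set(stdlib_pkgs)
if not freeze_all:
    skip.update(DEV_PKGS)
print(sorted(skip))  # these names are omitted from the freeze output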
@ -1,57 +0,0 @@
from __future__ import absolute_import

import hashlib
import logging
import sys

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR
from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
from pip._internal.utils.misc import read_chunks

logger = logging.getLogger(__name__)


class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.

    """
    name = 'hash'
    usage = '%prog [options] <file> ...'
    summary = 'Compute hashes of package archives.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(HashCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-a', '--algorithm',
            dest='algorithm',
            choices=STRONG_HASHES,
            action='store',
            default=FAVORITE_HASH,
            help='The hash algorithm to use: one of %s' %
                 ', '.join(STRONG_HASHES))
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        algorithm = options.algorithm
        for path in args:
            logger.info('%s:\n--hash=%s:%s',
                        path, algorithm, _hash_of_file(path, algorithm))


def _hash_of_file(path, algorithm):
    """Return the hash digest of a file."""
    with open(path, 'rb') as archive:
        hash = hashlib.new(algorithm)
        for chunk in read_chunks(archive):
            hash.update(chunk)
    return hash.hexdigest()
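_hash_of_file hashes the archive incrementally so large files never need to fit in memory. A self-contained equivalent, with a local stand-in for pip's internal read_chunks helper (assumed to yield fixed-size blocks):

# Stand-alone equivalent of _hash_of_file above.
import hashlib

def read_chunks(f, size=4096):
    # Stand-in for pip._internal.utils.misc.read_chunks.
    while True:
        chunk = f.read(size)
        if not chunk:
            break
        yield chunk

def hash_of_file(path, algorithm='sha256'):
    with open(path, 'rb') as archive:
        h = hashlib.new(algorithm)
        for chunk in read_chunks(archive):
            h.update(chunk)
    return h.hexdigest()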
@ -1,37 +0,0 @@
from __future__ import absolute_import

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.exceptions import CommandError


class HelpCommand(Command):
    """Show help for commands"""
    name = 'help'
    usage = """
      %prog <command>"""
    summary = 'Show help for commands.'
    ignore_require_venv = True

    def run(self, options, args):
        from pip._internal.commands import commands_dict, get_similar_commands

        try:
            # 'pip help' with no args is handled by pip.__init__.parseopt()
            cmd_name = args[0]  # the command we need help for
        except IndexError:
            return SUCCESS

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)

            msg = ['unknown command "%s"' % cmd_name]
            if guess:
                msg.append('maybe you meant "%s"' % guess)

            raise CommandError(' - '.join(msg))

        command = commands_dict[cmd_name]()
        command.parser.print_help()

        return SUCCESS
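The "maybe you meant" suggestion comes from get_similar_commands, which is imported above but defined elsewhere. A sketch of its assumed behaviour using difflib's fuzzy matching; the command list here is a stand-in:

# Sketch of a get_similar_commands-style helper (assumption, not pip's code).
import difflib

commands = ['install', 'download', 'uninstall', 'freeze', 'list', 'show']

def get_similar_commands(name):
    close = difflib.get_close_matches(name, commands)
    return close[0] if close else False

print(get_similar_commands('instal'))  # -> install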
@ -1,566 +0,0 @@
from __future__ import absolute_import

import errno
import logging
import operator
import os
import shutil
from optparse import SUPPRESS_HELP

from pip._vendor import pkg_resources

from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import RequirementCommand
from pip._internal.cli.status_codes import ERROR
from pip._internal.exceptions import (
    CommandError, InstallationError, PreviousBuildDirError,
)
from pip._internal.locations import distutils_scheme, virtualenv_no_global
from pip._internal.operations.check import check_install_conflicts
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet, install_given_reqs
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.resolve import Resolver
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.misc import (
    ensure_dir, get_installed_version,
    protect_pip_from_modification_on_windows,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel import WheelBuilder

logger = logging.getLogger(__name__)


class InstallCommand(RequirementCommand):
    """
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    """
    name = 'install'

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Install packages.'

    def __init__(self, *args, **kw):
        super(InstallCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.pre())

        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(
            '-t', '--target',
            dest='target_dir',
            metavar='dir',
            default=None,
            help='Install packages into <dir>. '
                 'By default this will not replace existing files/folders in '
                 '<dir>. Use --upgrade to replace existing packages in <dir> '
                 'with new versions.'
        )
        cmd_opts.add_option(cmdoptions.platform())
        cmd_opts.add_option(cmdoptions.python_version())
        cmd_opts.add_option(cmdoptions.implementation())
        cmd_opts.add_option(cmdoptions.abi())

        cmd_opts.add_option(
            '--user',
            dest='use_user_site',
            action='store_true',
            help="Install to the Python user install directory for your "
                 "platform. Typically ~/.local/, or %APPDATA%\\Python on "
                 "Windows. (See the Python documentation for site.USER_BASE "
                 "for full details.)")
        cmd_opts.add_option(
            '--no-user',
            dest='use_user_site',
            action='store_false',
            help=SUPPRESS_HELP)
        cmd_opts.add_option(
            '--root',
            dest='root_path',
            metavar='dir',
            default=None,
            help="Install everything relative to this alternate root "
                 "directory.")
        cmd_opts.add_option(
            '--prefix',
            dest='prefix_path',
            metavar='dir',
            default=None,
            help="Installation prefix where lib, bin and other top-level "
                 "folders are placed")

        cmd_opts.add_option(cmdoptions.build_dir())

        cmd_opts.add_option(cmdoptions.src())

        cmd_opts.add_option(
            '-U', '--upgrade',
            dest='upgrade',
            action='store_true',
            help='Upgrade all specified packages to the newest available '
                 'version. The handling of dependencies depends on the '
                 'upgrade-strategy used.'
        )

        cmd_opts.add_option(
            '--upgrade-strategy',
            dest='upgrade_strategy',
            default='only-if-needed',
            choices=['only-if-needed', 'eager'],
            help='Determines how dependency upgrading should be handled '
                 '[default: %default]. '
                 '"eager" - dependencies are upgraded regardless of '
                 'whether the currently installed version satisfies the '
                 'requirements of the upgraded package(s). '
                 '"only-if-needed" - dependencies are upgraded only when '
                 'they do not satisfy the requirements of the upgraded '
                 'package(s).'
        )

        cmd_opts.add_option(
            '--force-reinstall',
            dest='force_reinstall',
            action='store_true',
            help='Reinstall all packages even if they are already '
                 'up-to-date.')

        cmd_opts.add_option(
            '-I', '--ignore-installed',
            dest='ignore_installed',
            action='store_true',
            help='Ignore the installed packages (reinstalling instead).')

        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.use_pep517())
        cmd_opts.add_option(cmdoptions.no_use_pep517())

        cmd_opts.add_option(cmdoptions.install_options())
        cmd_opts.add_option(cmdoptions.global_options())

        cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile Python source files to bytecode",
        )

        cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile Python source files to bytecode",
        )

        cmd_opts.add_option(
            "--no-warn-script-location",
            action="store_false",
            dest="warn_script_location",
            default=True,
            help="Do not warn when installing scripts outside PATH",
        )
        cmd_opts.add_option(
            "--no-warn-conflicts",
            action="store_false",
            dest="warn_about_conflicts",
            default=True,
            help="Do not warn about broken dependencies",
        )

        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.prefer_binary())
        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())
        cmd_opts.add_option(cmdoptions.progress_bar())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        cmdoptions.check_install_build_global(options)
        upgrade_strategy = "to-satisfy-only"
        if options.upgrade:
            upgrade_strategy = options.upgrade_strategy

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        cmdoptions.check_dist_restriction(options, check_target=True)

        if options.python_version:
            python_versions = [options.python_version]
        else:
            python_versions = None

        options.src_dir = os.path.abspath(options.src_dir)
        install_options = options.install_options or []
        if options.use_user_site:
            if options.prefix_path:
                raise CommandError(
                    "Can not combine '--user' and '--prefix' as they imply "
                    "different installation locations"
                )
            if virtualenv_no_global():
                raise InstallationError(
                    "Can not perform a '--user' install. User site-packages "
                    "are not visible in this virtualenv."
                )
            install_options.append('--user')
            install_options.append('--prefix=')

        target_temp_dir = TempDirectory(kind="target")
        if options.target_dir:
            options.ignore_installed = True
            options.target_dir = os.path.abspath(options.target_dir)
            if (os.path.exists(options.target_dir) and not
                    os.path.isdir(options.target_dir)):
                raise CommandError(
                    "Target path exists but is not a directory, will not "
                    "continue."
                )

            # Create a target directory for using with the target option
            target_temp_dir.create()
            install_options.append('--home=' + target_temp_dir.path)

        global_options = options.global_options or []

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                platform=options.platform,
                python_versions=python_versions,
                abi=options.abi,
                implementation=options.implementation,
            )
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)

            if options.cache_dir and not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "by the current user and caching wheels has been "
                    "disabled. Check the permissions and owner of that "
                    "directory. If executing pip with sudo, you may want "
                    "sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

            with RequirementTracker() as req_tracker, TempDirectory(
                options.build_dir, delete=build_delete, kind="install"
            ) as directory:
                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                    check_supported_wheels=not options.target_dir,
                )

                try:
                    self.populate_requirement_set(
                        requirement_set, args, options, finder, session,
                        self.name, wheel_cache
                    )
                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=None,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                        req_tracker=req_tracker,
                    )

                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=options.use_user_site,
                        upgrade_strategy=upgrade_strategy,
                        force_reinstall=options.force_reinstall,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=options.ignore_installed,
                        isolated=options.isolated_mode,
                        use_pep517=options.use_pep517
                    )
                    resolver.resolve(requirement_set)

                    protect_pip_from_modification_on_windows(
                        modifying_pip=requirement_set.has_requirement("pip")
                    )

                    # Consider legacy and PEP517-using requirements separately
                    legacy_requirements = []
                    pep517_requirements = []
                    for req in requirement_set.requirements.values():
                        if req.use_pep517:
                            pep517_requirements.append(req)
                        else:
                            legacy_requirements.append(req)

                    # We don't build wheels for legacy requirements if we
                    # don't have wheel installed or we don't have a cache dir
                    try:
                        import wheel  # noqa: F401
                        build_legacy = bool(options.cache_dir)
                    except ImportError:
                        build_legacy = False

                    wb = WheelBuilder(
                        finder, preparer, wheel_cache,
                        build_options=[], global_options=[],
                    )

                    # Always build PEP 517 requirements
                    build_failures = wb.build(
                        pep517_requirements,
                        session=session, autobuilding=True
                    )

                    if build_legacy:
                        # We don't care about failures building legacy
                        # requirements, as we'll fall through to a direct
                        # install for those.
                        wb.build(
                            legacy_requirements,
                            session=session, autobuilding=True
                        )

                    # If we're using PEP 517, we cannot do a direct install
                    # so we fail here.
                    if build_failures:
                        raise InstallationError(
                            "Could not build wheels for {} which use"
                            " PEP 517 and cannot be installed directly".format(
                                ", ".join(r.name for r in build_failures)))

                    to_install = resolver.get_installation_order(
                        requirement_set
                    )

                    # Consistency checking of the package set we're installing.
                    should_warn_about_conflicts = (
                        not options.ignore_dependencies and
                        options.warn_about_conflicts
                    )
                    if should_warn_about_conflicts:
                        self._warn_about_conflicts(to_install)

                    # Don't warn about script install locations if
                    # --target has been specified
                    warn_script_location = options.warn_script_location
                    if options.target_dir:
                        warn_script_location = False

                    installed = install_given_reqs(
                        to_install,
                        install_options,
                        global_options,
                        root=options.root_path,
                        home=target_temp_dir.path,
                        prefix=options.prefix_path,
                        pycompile=options.compile,
                        warn_script_location=warn_script_location,
                        use_user_site=options.use_user_site,
                    )

                    lib_locations = get_lib_location_guesses(
                        user=options.use_user_site,
                        home=target_temp_dir.path,
                        root=options.root_path,
                        prefix=options.prefix_path,
                        isolated=options.isolated_mode,
                    )
                    working_set = pkg_resources.WorkingSet(lib_locations)

                    reqs = sorted(installed, key=operator.attrgetter('name'))
                    items = []
                    for req in reqs:
                        item = req.name
                        try:
                            installed_version = get_installed_version(
                                req.name, working_set=working_set
                            )
                            if installed_version:
                                item += '-' + installed_version
                        except Exception:
                            pass
                        items.append(item)
                    installed = ' '.join(items)
                    if installed:
                        logger.info('Successfully installed %s', installed)
                except EnvironmentError as error:
                    show_traceback = (self.verbosity >= 1)

                    message = create_env_error_message(
                        error, show_traceback, options.use_user_site,
                    )
                    logger.error(message, exc_info=show_traceback)

                    return ERROR
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    # Clean up
                    if not options.no_clean:
                        requirement_set.cleanup_files()
                        wheel_cache.cleanup()

        if options.target_dir:
            self._handle_target_dir(
                options.target_dir, target_temp_dir, options.upgrade
            )
        return requirement_set

    def _handle_target_dir(self, target_dir, target_temp_dir, upgrade):
        ensure_dir(target_dir)

        # Checking both purelib and platlib directories for installed
        # packages to be moved to target directory
        lib_dir_list = []

        with target_temp_dir:
            # Checking both purelib and platlib directories for installed
            # packages to be moved to target directory
            scheme = distutils_scheme('', home=target_temp_dir.path)
            purelib_dir = scheme['purelib']
            platlib_dir = scheme['platlib']
            data_dir = scheme['data']

            if os.path.exists(purelib_dir):
                lib_dir_list.append(purelib_dir)
            if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
                lib_dir_list.append(platlib_dir)
            if os.path.exists(data_dir):
                lib_dir_list.append(data_dir)

            for lib_dir in lib_dir_list:
                for item in os.listdir(lib_dir):
                    if lib_dir == data_dir:
                        ddir = os.path.join(data_dir, item)
                        if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
                            continue
                    target_item_dir = os.path.join(target_dir, item)
                    if os.path.exists(target_item_dir):
                        if not upgrade:
                            logger.warning(
                                'Target directory %s already exists. Specify '
                                '--upgrade to force replacement.',
                                target_item_dir
                            )
                            continue
                        if os.path.islink(target_item_dir):
                            logger.warning(
                                'Target directory %s already exists and is '
                                'a link. Pip will not automatically replace '
                                'links, please remove if replacement is '
                                'desired.',
                                target_item_dir
                            )
                            continue
                        if os.path.isdir(target_item_dir):
                            shutil.rmtree(target_item_dir)
                        else:
                            os.remove(target_item_dir)

                    shutil.move(
                        os.path.join(lib_dir, item),
                        target_item_dir
                    )

    def _warn_about_conflicts(self, to_install):
        try:
            package_set, _dep_info = check_install_conflicts(to_install)
        except Exception:
            logger.error("Error checking for conflicts.", exc_info=True)
            return
        missing, conflicting = _dep_info

        # NOTE: There is some duplication here from pip check
        for project_name in missing:
            version = package_set[project_name][0]
            for dependency in missing[project_name]:
                logger.critical(
                    "%s %s requires %s, which is not installed.",
                    project_name, version, dependency[1],
                )

        for project_name in conflicting:
            version = package_set[project_name][0]
            for dep_name, dep_version, req in conflicting[project_name]:
                logger.critical(
                    "%s %s has requirement %s, but you'll have %s %s which is "
                    "incompatible.",
                    project_name, version, req, dep_name, dep_version,
                )


def get_lib_location_guesses(*args, **kwargs):
    scheme = distutils_scheme('', *args, **kwargs)
    return [scheme['purelib'], scheme['platlib']]


def create_env_error_message(error, show_traceback, using_user_site):
    """Format an error message for an EnvironmentError

    It may occur anytime during the execution of the install command.
    """
    parts = []

    # Mention the error if we are not going to show a traceback
    parts.append("Could not install packages due to an EnvironmentError")
    if not show_traceback:
        parts.append(": ")
        parts.append(str(error))
    else:
        parts.append(".")

    # Split the error indication from a helper message (if any)
    parts[-1] += "\n"

    # Suggest useful actions to the user:
    #  (1) using user site-packages or (2) verifying the permissions
    if error.errno == errno.EACCES:
        user_option_part = "Consider using the `--user` option"
        permissions_part = "Check the permissions"

        if not using_user_site:
            parts.extend([
                user_option_part, " or ",
                permissions_part.lower(),
            ])
        else:
            parts.append(permissions_part)
        parts.append(".\n")

    return "".join(parts).strip() + "\n"
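To make the branching in create_env_error_message concrete: assuming the function as defined above is in scope, a permissions failure (errno.EACCES) with no traceback and no --user install produces a two-line hint:

# Worked example against create_env_error_message above.
import errno

error = OSError(errno.EACCES, 'Permission denied')
message = create_env_error_message(error, show_traceback=False,
                                   using_user_site=False)
print(message)
# Could not install packages due to an EnvironmentError: [Errno 13] Permission denied
# Consider using the `--user` option or check the permissions.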
@ -1,301 +0,0 @@
from __future__ import absolute_import

import json
import logging

from pip._vendor import six
from pip._vendor.six.moves import zip_longest

from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.exceptions import CommandError
from pip._internal.index import PackageFinder
from pip._internal.utils.misc import (
    dist_is_editable, get_installed_distributions,
)
from pip._internal.utils.packaging import get_installer

logger = logging.getLogger(__name__)


class ListCommand(Command):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """
    name = 'list'
    usage = """
      %prog [options]"""
    summary = 'List installed packages.'

    def __init__(self, *args, **kw):
        super(ListCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-o', '--outdated',
            action='store_true',
            default=False,
            help='List outdated packages')
        cmd_opts.add_option(
            '-u', '--uptodate',
            action='store_true',
            default=False,
            help='List uptodate packages')
        cmd_opts.add_option(
            '-e', '--editable',
            action='store_true',
            default=False,
            help='List editable projects.')
        cmd_opts.add_option(
            '-l', '--local',
            action='store_true',
            default=False,
            help=('If in a virtualenv that has global access, do not list '
                  'globally-installed packages.'),
        )
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(
            '--format',
            action='store',
            dest='list_format',
            default="columns",
            choices=('columns', 'freeze', 'json'),
            help="Select the output format among: columns (default), freeze, "
                 "or json",
        )

        cmd_opts.add_option(
            '--not-required',
            action='store_true',
            dest='not_required',
            help="List packages that are not dependencies of "
                 "installed packages.",
        )

        cmd_opts.add_option(
            '--exclude-editable',
            action='store_false',
            dest='include_editable',
            help='Exclude editable packages from output.',
        )
        cmd_opts.add_option(
            '--include-editable',
            action='store_true',
            dest='include_editable',
            help='Include editable packages in output.',
            default=True,
        )
        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group, self.parser
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def _build_package_finder(self, options, index_urls, session):
        """
        Create a package finder appropriate to this list command.
        """
        return PackageFinder(
            find_links=options.find_links,
            index_urls=index_urls,
            allow_all_prereleases=options.pre,
            trusted_hosts=options.trusted_hosts,
            session=session,
        )

    def run(self, options, args):
        if options.outdated and options.uptodate:
            raise CommandError(
                "Options --outdated and --uptodate cannot be combined.")

        packages = get_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            editables_only=options.editable,
            include_editables=options.include_editable,
        )

        # get_not_required must be called first in order to find and
        # filter out all dependencies correctly. Otherwise a package
        # can't be identified as a requirement because some parent packages
        # could be filtered out before.
        if options.not_required:
            packages = self.get_not_required(packages, options)

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        self.output_package_listing(packages, options)

    def get_outdated(self, packages, options):
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.parsed_version
        ]

    def get_uptodate(self, packages, options):
        return [
            dist for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.parsed_version
        ]

    def get_not_required(self, packages, options):
        dep_keys = set()
        for dist in packages:
            dep_keys.update(requirement.key for requirement in dist.requires())
        return {pkg for pkg in packages if pkg.key not in dep_keys}

    def iter_packages_latest_infos(self, packages, options):
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, index_urls, session)

            for dist in packages:
                typ = 'unknown'
                all_candidates = finder.find_all_candidates(dist.key)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [candidate for candidate in all_candidates
                                      if not candidate.version.is_prerelease]

                if not all_candidates:
                    continue
                best_candidate = max(all_candidates,
                                     key=finder._candidate_sort_key)
                remote_version = best_candidate.version
                if best_candidate.location.is_wheel:
                    typ = 'wheel'
                else:
                    typ = 'sdist'
                # This is dirty but makes the rest of the code much cleaner
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                yield dist

    def output_package_listing(self, packages, options):
        packages = sorted(
            packages,
            key=lambda dist: dist.project_name.lower(),
        )
        if options.list_format == 'columns' and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == 'freeze':
            for dist in packages:
                if options.verbose >= 1:
                    logger.info("%s==%s (%s)", dist.project_name,
                                dist.version, dist.location)
                else:
                    logger.info("%s==%s", dist.project_name, dist.version)
        elif options.list_format == 'json':
            logger.info(format_for_json(packages, options))

    def output_package_listing_columns(self, data, header):
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: '-' * x, sizes)))

        for val in pkg_strings:
            logger.info(val)


def tabulate(vals):
    # From pfmoore on GitHub:
    # https://github.com/pypa/pip/issues/3651#issuecomment-216932564
    assert len(vals) > 0

    sizes = [0] * max(len(x) for x in vals)
    for row in vals:
        sizes = [max(s, len(str(c))) for s, c in zip_longest(sizes, row)]

    result = []
    for row in vals:
        display = " ".join([str(c).ljust(s) if c is not None else ''
                            for s, c in zip_longest(sizes, row)])
        result.append(display)

    return result, sizes


def format_for_columns(pkgs, options):
    """
    Convert the package data into something usable
    by output_package_listing_columns.
    """
    running_outdated = options.outdated
    # Adjust the header for the `pip list --outdated` case.
    if running_outdated:
        header = ["Package", "Version", "Latest", "Type"]
    else:
        header = ["Package", "Version"]

    data = []
    if options.verbose >= 1 or any(dist_is_editable(x) for x in pkgs):
        header.append("Location")
    if options.verbose >= 1:
        header.append("Installer")

    for proj in pkgs:
        # if we're working on the 'outdated' list, separate out the
        # latest_version and type
        row = [proj.project_name, proj.version]

        if running_outdated:
            row.append(proj.latest_version)
            row.append(proj.latest_filetype)

        if options.verbose >= 1 or dist_is_editable(proj):
            row.append(proj.location)
        if options.verbose >= 1:
            row.append(get_installer(proj))

        data.append(row)

    return data, header


def format_for_json(packages, options):
    data = []
    for dist in packages:
        info = {
            'name': dist.project_name,
            'version': six.text_type(dist.version),
        }
        if options.verbose >= 1:
            info['location'] = dist.location
            info['installer'] = get_installer(dist)
        if options.outdated:
            info['latest_version'] = six.text_type(dist.latest_version)
            info['latest_filetype'] = dist.latest_filetype
        data.append(info)
    return json.dumps(data)
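tabulate pads each column to its widest cell; zip_longest fills short rows with None, which renders as an empty string. A quick check against the helper defined above (sample rows are illustrative):

# Usage example for tabulate as defined above.
rows = [["Package", "Version"], ["pip", "19.0.1"], ["setuptools"]]
pkg_strings, sizes = tabulate(rows)
for line in pkg_strings:
    print(line)
# Package    Version
# pip        19.0.1
# setuptools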
@ -1,135 +0,0 @@
|
|||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import sys
|
|
||||||
import textwrap
|
|
||||||
from collections import OrderedDict

from pip._vendor import pkg_resources
from pip._vendor.packaging.version import parse as parse_version
# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
# why we ignore the type on this import
from pip._vendor.six.moves import xmlrpc_client  # type: ignore

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
from pip._internal.download import PipXmlrpcTransport
from pip._internal.exceptions import CommandError
from pip._internal.models.index import PyPI
from pip._internal.utils.compat import get_terminal_size
from pip._internal.utils.logging import indent_log

logger = logging.getLogger(__name__)


class SearchCommand(Command):
    """Search for PyPI packages whose name or summary contains <query>."""
    name = 'search'
    usage = """
      %prog [options] <query>"""
    summary = 'Search PyPI for packages.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(SearchCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-i', '--index',
            dest='index',
            metavar='URL',
            default=PyPI.pypi_url,
            help='Base URL of Python Package Index (default %default)')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            raise CommandError('Missing required argument (search query).')
        query = args
        pypi_hits = self.search(query, options)
        hits = transform_hits(pypi_hits)

        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query, options):
        index_url = options.index
        with self._build_session(options) as session:
            transport = PipXmlrpcTransport(index_url, session)
            pypi = xmlrpc_client.ServerProxy(index_url, transport)
            hits = pypi.search({'name': query, 'summary': query}, 'or')
            return hits


def transform_hits(hits):
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    """
    packages = OrderedDict()
    for hit in hits:
        name = hit['name']
        summary = hit['summary']
        version = hit['version']

        if name not in packages.keys():
            packages[name] = {
                'name': name,
                'summary': summary,
                'versions': [version],
            }
        else:
            packages[name]['versions'].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]['versions']):
                packages[name]['summary'] = summary

    return list(packages.values())


def print_results(hits, name_column_width=None, terminal_width=None):
    if not hits:
        return
    if name_column_width is None:
        name_column_width = max([
            len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
            for hit in hits
        ]) + 4

    installed_packages = [p.project_name for p in pkg_resources.working_set]
    for hit in hits:
        name = hit['name']
        summary = hit['summary'] or ''
        latest = highest_version(hit.get('versions', ['-']))
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary = textwrap.wrap(summary, target_width)
                summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)

        line = '%-*s - %s' % (name_column_width,
                              '%s (%s)' % (name, latest), summary)
        try:
            logger.info(line)
            if name in installed_packages:
                dist = pkg_resources.get_distribution(name)
                with indent_log():
                    if dist.version == latest:
                        logger.info('INSTALLED: %s (latest)', dist.version)
                    else:
                        logger.info('INSTALLED: %s', dist.version)
                        logger.info('LATEST:    %s', latest)
        except UnicodeEncodeError:
            pass


def highest_version(versions):
    return max(versions, key=parse_version)
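
A minimal sketch of the grouping that transform_hits performs on the raw XML-RPC hits. The hit dicts below are invented, and the standalone packaging library stands in for pip's vendored copy:

    from collections import OrderedDict
    from packaging.version import parse as parse_version

    hits = [
        {'name': 'demo', 'summary': 'old summary', 'version': '1.0'},
        {'name': 'demo', 'summary': 'new summary', 'version': '2.0'},
        {'name': 'other', 'summary': 'another package', 'version': '0.3'},
    ]

    packages = OrderedDict()
    for hit in hits:
        name, summary, version = hit['name'], hit['summary'], hit['version']
        if name not in packages:
            packages[name] = {'name': name, 'summary': summary,
                              'versions': [version]}
        else:
            packages[name]['versions'].append(version)
            # keep the summary belonging to the highest version seen so far
            if version == max(packages[name]['versions'], key=parse_version):
                packages[name]['summary'] = summary

    print(list(packages.values()))
    # 'demo' ends up with both versions inline and the summary from 2.0
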
@ -1,168 +0,0 @@
from __future__ import absolute_import

import logging
import os
from email.parser import FeedParser  # type: ignore

from pip._vendor import pkg_resources
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS

logger = logging.getLogger(__name__)


class ShowCommand(Command):
    """
    Show information about one or more installed packages.

    The output is in RFC-compliant mail header format.
    """
    name = 'show'
    usage = """
      %prog [options] <package> ..."""
    summary = 'Show information about installed packages.'
    ignore_require_venv = True

    def __init__(self, *args, **kw):
        super(ShowCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-f', '--files',
            dest='files',
            action='store_true',
            default=False,
            help='Show the full list of installed files for each package.')

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        if not args:
            logger.warning('ERROR: Please provide a package name or names.')
            return ERROR
        query = args

        results = search_packages_info(query)
        if not print_results(
                results, list_files=options.files, verbose=options.verbose):
            return ERROR
        return SUCCESS


def search_packages_info(query):
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Listing installed files requires
    a pip-generated 'installed-files.txt' in the distribution's '.egg-info'
    directory.
    """
    installed = {}
    for p in pkg_resources.working_set:
        installed[canonicalize_name(p.project_name)] = p

    query_names = [canonicalize_name(name) for name in query]

    for dist in [installed[pkg] for pkg in query_names if pkg in installed]:
        package = {
            'name': dist.project_name,
            'version': dist.version,
            'location': dist.location,
            'requires': [dep.project_name for dep in dist.requires()],
        }
        file_list = None
        metadata = None
        if isinstance(dist, pkg_resources.DistInfoDistribution):
            # RECORDs should be part of .dist-info metadatas
            if dist.has_metadata('RECORD'):
                lines = dist.get_metadata_lines('RECORD')
                paths = [l.split(',')[0] for l in lines]
                paths = [os.path.join(dist.location, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('METADATA'):
                metadata = dist.get_metadata('METADATA')
        else:
            # Otherwise use pip's log for .egg-info's
            if dist.has_metadata('installed-files.txt'):
                paths = dist.get_metadata_lines('installed-files.txt')
                paths = [os.path.join(dist.egg_info, p) for p in paths]
                file_list = [os.path.relpath(p, dist.location) for p in paths]

            if dist.has_metadata('PKG-INFO'):
                metadata = dist.get_metadata('PKG-INFO')

        if dist.has_metadata('entry_points.txt'):
            entry_points = dist.get_metadata_lines('entry_points.txt')
            package['entry_points'] = entry_points

        if dist.has_metadata('INSTALLER'):
            for line in dist.get_metadata_lines('INSTALLER'):
                if line.strip():
                    package['installer'] = line.strip()
                    break

        # @todo: Should pkg_resources.Distribution have a
        # `get_pkg_info` method?
        feed_parser = FeedParser()
        feed_parser.feed(metadata)
        pkg_info_dict = feed_parser.close()
        for key in ('metadata-version', 'summary',
                    'home-page', 'author', 'author-email', 'license'):
            package[key] = pkg_info_dict.get(key)

        # It looks like FeedParser cannot deal with repeated headers
        classifiers = []
        for line in metadata.splitlines():
            if line.startswith('Classifier: '):
                classifiers.append(line[len('Classifier: '):])
        package['classifiers'] = classifiers

        if file_list:
            package['files'] = sorted(file_list)
        yield package


def print_results(distributions, list_files=False, verbose=False):
    """
    Print the information from the installed distributions found.
    """
    results_printed = False
    for i, dist in enumerate(distributions):
        results_printed = True
        if i > 0:
            logger.info("---")

        name = dist.get('name', '')
        required_by = [
            pkg.project_name for pkg in pkg_resources.working_set
            if name in [required.name for required in pkg.requires()]
        ]

        logger.info("Name: %s", name)
        logger.info("Version: %s", dist.get('version', ''))
        logger.info("Summary: %s", dist.get('summary', ''))
        logger.info("Home-page: %s", dist.get('home-page', ''))
        logger.info("Author: %s", dist.get('author', ''))
        logger.info("Author-email: %s", dist.get('author-email', ''))
        logger.info("License: %s", dist.get('license', ''))
        logger.info("Location: %s", dist.get('location', ''))
        logger.info("Requires: %s", ', '.join(dist.get('requires', [])))
        logger.info("Required-by: %s", ', '.join(required_by))

        if verbose:
            logger.info("Metadata-Version: %s",
                        dist.get('metadata-version', ''))
            logger.info("Installer: %s", dist.get('installer', ''))
            logger.info("Classifiers:")
            for classifier in dist.get('classifiers', []):
                logger.info("  %s", classifier)
            logger.info("Entry-points:")
            for entry in dist.get('entry_points', []):
                logger.info("  %s", entry.strip())
        if list_files:
            logger.info("Files:")
            for line in dist.get('files', []):
                logger.info("  %s", line.strip())
            if "files" not in dist:
                logger.info("Cannot locate installed-files.txt")
    return results_printed
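
As a side note, this is roughly how FeedParser turns RFC 822 style package metadata into a mapping, the way search_packages_info does above. The metadata text here is invented for illustration:

    from email.parser import FeedParser

    metadata = (
        "Metadata-Version: 1.2\n"
        "Name: demo\n"
        "Summary: An example package\n"
        "Classifier: Programming Language :: Python\n"
        "Classifier: License :: OSI Approved :: MIT License\n"
    )

    feed_parser = FeedParser()
    feed_parser.feed(metadata)
    pkg_info = feed_parser.close()

    print(pkg_info.get('summary'))         # An example package
    # get() returns only the first of repeated headers, which is why the
    # code above re-scans the raw text for 'Classifier: ' lines instead.
    print(pkg_info.get_all('classifier'))  # both classifier values
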
@ -1,78 +0,0 @@
from __future__ import absolute_import

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.cli.base_command import Command
from pip._internal.exceptions import InstallationError
from pip._internal.req import parse_requirements
from pip._internal.req.constructors import install_req_from_line
from pip._internal.utils.misc import protect_pip_from_modification_on_windows


class UninstallCommand(Command):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """
    name = 'uninstall'
    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""
    summary = 'Uninstall packages.'

    def __init__(self, *args, **kw):
        super(UninstallCommand, self).__init__(*args, **kw)
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirements',
            action='append',
            default=[],
            metavar='file',
            help='Uninstall all the packages listed in the given requirements '
                 'file. This option can be used multiple times.',
        )
        self.cmd_opts.add_option(
            '-y', '--yes',
            dest='yes',
            action='store_true',
            help="Don't ask for confirmation of uninstall deletions.")

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options, args):
        with self._build_session(options) as session:
            reqs_to_uninstall = {}
            for name in args:
                req = install_req_from_line(
                    name, isolated=options.isolated_mode,
                )
                if req.name:
                    reqs_to_uninstall[canonicalize_name(req.name)] = req
            for filename in options.requirements:
                for req in parse_requirements(
                        filename,
                        options=options,
                        session=session):
                    if req.name:
                        reqs_to_uninstall[canonicalize_name(req.name)] = req
            if not reqs_to_uninstall:
                raise InstallationError(
                    'You must give at least one requirement to %(name)s (see '
                    '"pip help %(name)s")' % dict(name=self.name)
                )

            protect_pip_from_modification_on_windows(
                modifying_pip="pip" in reqs_to_uninstall
            )

            for req in reqs_to_uninstall.values():
                uninstall_pathset = req.uninstall(
                    auto_confirm=options.yes, verbose=self.verbosity > 0,
                )
                if uninstall_pathset:
                    uninstall_pathset.commit()
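
For reference, the PEP 503 normalization that keys reqs_to_uninstall, so that differently spelled names of the same project collapse to one entry. Shown here with the standalone packaging library rather than pip's vendored copy:

    from packaging.utils import canonicalize_name

    for raw in ('Django', 'Foo_Bar', 'zope.interface'):
        print(raw, '->', canonicalize_name(raw))
    # Django -> django
    # Foo_Bar -> foo-bar
    # zope.interface -> zope-interface
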
@ -1,186 +0,0 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import

import logging
import os

from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import RequirementCommand
from pip._internal.exceptions import CommandError, PreviousBuildDirError
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req import RequirementSet
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.resolve import Resolver
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.wheel import WheelBuilder

logger = logging.getLogger(__name__)


class WheelCommand(RequirementCommand):
    """
    Build Wheel archives for your requirements and dependencies.

    Wheel is a built-package format, and offers the advantage of not
    recompiling your software during every install. For more details, see the
    wheel docs: https://wheel.readthedocs.io/en/latest/

    Requirements: setuptools>=0.8, and wheel.

    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
    package to build individual wheels.

    """

    name = 'wheel'
    usage = """
      %prog [options] <requirement specifier> ...
      %prog [options] -r <requirements file> ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    summary = 'Build wheels from your requirements.'

    def __init__(self, *args, **kw):
        super(WheelCommand, self).__init__(*args, **kw)

        cmd_opts = self.cmd_opts

        cmd_opts.add_option(
            '-w', '--wheel-dir',
            dest='wheel_dir',
            metavar='dir',
            default=os.curdir,
            help=("Build wheels into <dir>, where the default is the "
                  "current working directory."),
        )
        cmd_opts.add_option(cmdoptions.no_binary())
        cmd_opts.add_option(cmdoptions.only_binary())
        cmd_opts.add_option(cmdoptions.prefer_binary())
        cmd_opts.add_option(
            '--build-option',
            dest='build_options',
            metavar='options',
            action='append',
            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
        )
        cmd_opts.add_option(cmdoptions.no_build_isolation())
        cmd_opts.add_option(cmdoptions.use_pep517())
        cmd_opts.add_option(cmdoptions.no_use_pep517())
        cmd_opts.add_option(cmdoptions.constraints())
        cmd_opts.add_option(cmdoptions.editable())
        cmd_opts.add_option(cmdoptions.requirements())
        cmd_opts.add_option(cmdoptions.src())
        cmd_opts.add_option(cmdoptions.ignore_requires_python())
        cmd_opts.add_option(cmdoptions.no_deps())
        cmd_opts.add_option(cmdoptions.build_dir())
        cmd_opts.add_option(cmdoptions.progress_bar())

        cmd_opts.add_option(
            '--global-option',
            dest='global_options',
            action='append',
            metavar='options',
            help="Extra global options to be supplied to the setup.py "
                 "call before the 'bdist_wheel' command.")

        cmd_opts.add_option(
            '--pre',
            action='store_true',
            default=False,
            help=("Include pre-release and development versions. By default, "
                  "pip only finds stable versions."),
        )

        cmd_opts.add_option(cmdoptions.no_clean())
        cmd_opts.add_option(cmdoptions.require_hashes())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, cmd_opts)

    def run(self, options, args):
        cmdoptions.check_install_build_global(options)

        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index:
            logger.debug('Ignoring indexes: %s', ','.join(index_urls))
            index_urls = []

        if options.build_dir:
            options.build_dir = os.path.abspath(options.build_dir)

        options.src_dir = os.path.abspath(options.src_dir)

        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)
            build_delete = (not (options.no_clean or options.build_dir))
            wheel_cache = WheelCache(options.cache_dir, options.format_control)

            with RequirementTracker() as req_tracker, TempDirectory(
                options.build_dir, delete=build_delete, kind="wheel"
            ) as directory:

                requirement_set = RequirementSet(
                    require_hashes=options.require_hashes,
                )

                try:
                    self.populate_requirement_set(
                        requirement_set, args, options, finder, session,
                        self.name, wheel_cache
                    )

                    preparer = RequirementPreparer(
                        build_dir=directory.path,
                        src_dir=options.src_dir,
                        download_dir=None,
                        wheel_download_dir=options.wheel_dir,
                        progress_bar=options.progress_bar,
                        build_isolation=options.build_isolation,
                        req_tracker=req_tracker,
                    )

                    resolver = Resolver(
                        preparer=preparer,
                        finder=finder,
                        session=session,
                        wheel_cache=wheel_cache,
                        use_user_site=False,
                        upgrade_strategy="to-satisfy-only",
                        force_reinstall=False,
                        ignore_dependencies=options.ignore_dependencies,
                        ignore_requires_python=options.ignore_requires_python,
                        ignore_installed=True,
                        isolated=options.isolated_mode,
                        use_pep517=options.use_pep517
                    )
                    resolver.resolve(requirement_set)

                    # build wheels
                    wb = WheelBuilder(
                        finder, preparer, wheel_cache,
                        build_options=options.build_options or [],
                        global_options=options.global_options or [],
                        no_clean=options.no_clean,
                    )
                    build_failures = wb.build(
                        requirement_set.requirements.values(), session=session,
                    )
                    if len(build_failures) != 0:
                        raise CommandError(
                            "Failed to build one or more wheels"
                        )
                except PreviousBuildDirError:
                    options.no_clean = True
                    raise
                finally:
                    if not options.no_clean:
                        requirement_set.cleanup_files()
                        wheel_cache.cleanup()
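
The build_delete flag above deletes the temporary build directory only when pip picked the directory itself and --no-clean was not given. A quick sketch of that condition; the custom path is invented:

    for no_clean in (False, True):
        for build_dir in (None, '/tmp/custom-build'):
            build_delete = not (no_clean or build_dir)
            print(no_clean, build_dir, '->', build_delete)
    # only no_clean=False with build_dir=None yields True
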
@ -1,387 +0,0 @@
"""Configuration management setup
|
|
||||||
|
|
||||||
Some terminology:
|
|
||||||
- name
|
|
||||||
As written in config files.
|
|
||||||
- value
|
|
||||||
Value associated with a name
|
|
||||||
- key
|
|
||||||
Name combined with it's section (section.name)
|
|
||||||
- variant
|
|
||||||
A single word describing where the configuration key-value pair came from
|
|
||||||
"""
|
|
||||||
|
|
||||||
import locale
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
|
|
||||||
from pip._vendor import six
|
|
||||||
from pip._vendor.six.moves import configparser
|
|
||||||
|
|
||||||
from pip._internal.exceptions import (
|
|
||||||
ConfigurationError, ConfigurationFileCouldNotBeLoaded,
|
|
||||||
)
|
|
||||||
from pip._internal.locations import (
|
|
||||||
legacy_config_file, new_config_file, running_under_virtualenv,
|
|
||||||
site_config_files, venv_config_file,
|
|
||||||
)
|
|
||||||
from pip._internal.utils.misc import ensure_dir, enum
|
|
||||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
|
||||||
|
|
||||||
if MYPY_CHECK_RUNNING:
|
|
||||||
from typing import ( # noqa: F401
|
|
||||||
Any, Dict, Iterable, List, NewType, Optional, Tuple
|
|
||||||
)
|
|
||||||
|
|
||||||
RawConfigParser = configparser.RawConfigParser # Shorthand
|
|
||||||
Kind = NewType("Kind", str)
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
# NOTE: Maybe use the optionx attribute to normalize keynames.
|
|
||||||
def _normalize_name(name):
|
|
||||||
# type: (str) -> str
|
|
||||||
"""Make a name consistent regardless of source (environment or file)
|
|
||||||
"""
|
|
||||||
name = name.lower().replace('_', '-')
|
|
||||||
if name.startswith('--'):
|
|
||||||
name = name[2:] # only prefer long opts
|
|
||||||
return name
|
|
||||||
|
|
||||||
|
|
||||||
def _disassemble_key(name):
|
|
||||||
# type: (str) -> List[str]
|
|
||||||
return name.split(".", 1)
|
|
||||||
|
|
||||||
|
|
||||||
# The kinds of configurations there are.
|
|
||||||
kinds = enum(
|
|
||||||
USER="user", # User Specific
|
|
||||||
GLOBAL="global", # System Wide
|
|
||||||
VENV="venv", # Virtual Environment Specific
|
|
||||||
ENV="env", # from PIP_CONFIG_FILE
|
|
||||||
ENV_VAR="env-var", # from Environment Variables
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class Configuration(object):
|
|
||||||
"""Handles management of configuration.
|
|
||||||
|
|
||||||
Provides an interface to accessing and managing configuration files.
|
|
||||||
|
|
||||||
This class converts provides an API that takes "section.key-name" style
|
|
||||||
keys and stores the value associated with it as "key-name" under the
|
|
||||||
section "section".
|
|
||||||
|
|
||||||
This allows for a clean interface wherein the both the section and the
|
|
||||||
key-name are preserved in an easy to manage form in the configuration files
|
|
||||||
and the data stored is also nice.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, isolated, load_only=None):
|
|
||||||
# type: (bool, Kind) -> None
|
|
||||||
super(Configuration, self).__init__()
|
|
||||||
|
|
||||||
_valid_load_only = [kinds.USER, kinds.GLOBAL, kinds.VENV, None]
|
|
||||||
if load_only not in _valid_load_only:
|
|
||||||
raise ConfigurationError(
|
|
||||||
"Got invalid value for load_only - should be one of {}".format(
|
|
||||||
", ".join(map(repr, _valid_load_only[:-1]))
|
|
||||||
)
|
|
||||||
)
|
|
||||||
self.isolated = isolated # type: bool
|
|
||||||
self.load_only = load_only # type: Optional[Kind]
|
|
||||||
|
|
||||||
# The order here determines the override order.
|
|
||||||
self._override_order = [
|
|
||||||
kinds.GLOBAL, kinds.USER, kinds.VENV, kinds.ENV, kinds.ENV_VAR
|
|
||||||
]
|
|
||||||
|
|
||||||
self._ignore_env_names = ["version", "help"]
|
|
||||||
|
|
||||||
# Because we keep track of where we got the data from
|
|
||||||
self._parsers = {
|
|
||||||
variant: [] for variant in self._override_order
|
|
||||||
} # type: Dict[Kind, List[Tuple[str, RawConfigParser]]]
|
|
||||||
self._config = {
|
|
||||||
variant: {} for variant in self._override_order
|
|
||||||
} # type: Dict[Kind, Dict[str, Any]]
|
|
||||||
self._modified_parsers = [] # type: List[Tuple[str, RawConfigParser]]
|
|
||||||
|
|
||||||
def load(self):
|
|
||||||
# type: () -> None
|
|
||||||
"""Loads configuration from configuration files and environment
|
|
||||||
"""
|
|
||||||
self._load_config_files()
|
|
||||||
if not self.isolated:
|
|
||||||
self._load_environment_vars()
|
|
||||||
|
|
||||||
def get_file_to_edit(self):
|
|
||||||
# type: () -> Optional[str]
|
|
||||||
"""Returns the file with highest priority in configuration
|
|
||||||
"""
|
|
||||||
assert self.load_only is not None, \
|
|
||||||
"Need to be specified a file to be editing"
|
|
||||||
|
|
||||||
try:
|
|
||||||
return self._get_parser_to_modify()[0]
|
|
||||||
except IndexError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
def items(self):
|
|
||||||
# type: () -> Iterable[Tuple[str, Any]]
|
|
||||||
"""Returns key-value pairs like dict.items() representing the loaded
|
|
||||||
configuration
|
|
||||||
"""
|
|
||||||
return self._dictionary.items()
|
|
||||||
|
|
||||||
def get_value(self, key):
|
|
||||||
# type: (str) -> Any
|
|
||||||
"""Get a value from the configuration.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
return self._dictionary[key]
|
|
||||||
except KeyError:
|
|
||||||
raise ConfigurationError("No such key - {}".format(key))
|
|
||||||
|
|
||||||
def set_value(self, key, value):
|
|
||||||
# type: (str, Any) -> None
|
|
||||||
"""Modify a value in the configuration.
|
|
||||||
"""
|
|
||||||
self._ensure_have_load_only()
|
|
||||||
|
|
||||||
fname, parser = self._get_parser_to_modify()
|
|
||||||
|
|
||||||
if parser is not None:
|
|
||||||
section, name = _disassemble_key(key)
|
|
||||||
|
|
||||||
# Modify the parser and the configuration
|
|
||||||
if not parser.has_section(section):
|
|
||||||
parser.add_section(section)
|
|
||||||
parser.set(section, name, value)
|
|
||||||
|
|
||||||
self._config[self.load_only][key] = value
|
|
||||||
self._mark_as_modified(fname, parser)
|
|
||||||
|
|
||||||
def unset_value(self, key):
|
|
||||||
# type: (str) -> None
|
|
||||||
"""Unset a value in the configuration.
|
|
||||||
"""
|
|
||||||
self._ensure_have_load_only()
|
|
||||||
|
|
||||||
if key not in self._config[self.load_only]:
|
|
||||||
raise ConfigurationError("No such key - {}".format(key))
|
|
||||||
|
|
||||||
fname, parser = self._get_parser_to_modify()
|
|
||||||
|
|
||||||
if parser is not None:
|
|
||||||
section, name = _disassemble_key(key)
|
|
||||||
|
|
||||||
# Remove the key in the parser
|
|
||||||
modified_something = False
|
|
||||||
if parser.has_section(section):
|
|
||||||
# Returns whether the option was removed or not
|
|
||||||
modified_something = parser.remove_option(section, name)
|
|
||||||
|
|
||||||
if modified_something:
|
|
||||||
# name removed from parser, section may now be empty
|
|
||||||
section_iter = iter(parser.items(section))
|
|
||||||
try:
|
|
||||||
val = six.next(section_iter)
|
|
||||||
except StopIteration:
|
|
||||||
val = None
|
|
||||||
|
|
||||||
if val is None:
|
|
||||||
parser.remove_section(section)
|
|
||||||
|
|
||||||
self._mark_as_modified(fname, parser)
|
|
||||||
else:
|
|
||||||
raise ConfigurationError(
|
|
||||||
"Fatal Internal error [id=1]. Please report as a bug."
|
|
||||||
)
|
|
||||||
|
|
||||||
del self._config[self.load_only][key]
|
|
||||||
|
|
||||||
def save(self):
|
|
||||||
# type: () -> None
|
|
||||||
"""Save the currentin-memory state.
|
|
||||||
"""
|
|
||||||
self._ensure_have_load_only()
|
|
||||||
|
|
||||||
for fname, parser in self._modified_parsers:
|
|
||||||
logger.info("Writing to %s", fname)
|
|
||||||
|
|
||||||
# Ensure directory exists.
|
|
||||||
ensure_dir(os.path.dirname(fname))
|
|
||||||
|
|
||||||
with open(fname, "w") as f:
|
|
||||||
parser.write(f) # type: ignore
|
|
||||||
|
|
||||||
#
|
|
||||||
# Private routines
|
|
||||||
#
|
|
||||||
|
|
||||||
def _ensure_have_load_only(self):
|
|
||||||
# type: () -> None
|
|
||||||
if self.load_only is None:
|
|
||||||
raise ConfigurationError("Needed a specific file to be modifying.")
|
|
||||||
logger.debug("Will be working with %s variant only", self.load_only)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def _dictionary(self):
|
|
||||||
# type: () -> Dict[str, Any]
|
|
||||||
"""A dictionary representing the loaded configuration.
|
|
||||||
"""
|
|
||||||
# NOTE: Dictionaries are not populated if not loaded. So, conditionals
|
|
||||||
# are not needed here.
|
|
||||||
retval = {}
|
|
||||||
|
|
||||||
for variant in self._override_order:
|
|
||||||
retval.update(self._config[variant])
|
|
||||||
|
|
||||||
return retval
|
|
||||||
|
|
||||||
def _load_config_files(self):
|
|
||||||
# type: () -> None
|
|
||||||
"""Loads configuration from configuration files
|
|
||||||
"""
|
|
||||||
config_files = dict(self._iter_config_files())
|
|
||||||
if config_files[kinds.ENV][0:1] == [os.devnull]:
|
|
||||||
logger.debug(
|
|
||||||
"Skipping loading configuration files due to "
|
|
||||||
"environment's PIP_CONFIG_FILE being os.devnull"
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
for variant, files in config_files.items():
|
|
||||||
for fname in files:
|
|
||||||
# If there's specific variant set in `load_only`, load only
|
|
||||||
# that variant, not the others.
|
|
||||||
if self.load_only is not None and variant != self.load_only:
|
|
||||||
logger.debug(
|
|
||||||
"Skipping file '%s' (variant: %s)", fname, variant
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
parser = self._load_file(variant, fname)
|
|
||||||
|
|
||||||
# Keeping track of the parsers used
|
|
||||||
self._parsers[variant].append((fname, parser))
|
|
||||||
|
|
||||||
def _load_file(self, variant, fname):
|
|
||||||
# type: (Kind, str) -> RawConfigParser
|
|
||||||
logger.debug("For variant '%s', will try loading '%s'", variant, fname)
|
|
||||||
parser = self._construct_parser(fname)
|
|
||||||
|
|
||||||
for section in parser.sections():
|
|
||||||
items = parser.items(section)
|
|
||||||
self._config[variant].update(self._normalized_keys(section, items))
|
|
||||||
|
|
||||||
return parser
|
|
||||||
|
|
||||||
def _construct_parser(self, fname):
|
|
||||||
# type: (str) -> RawConfigParser
|
|
||||||
parser = configparser.RawConfigParser()
|
|
||||||
# If there is no such file, don't bother reading it but create the
|
|
||||||
# parser anyway, to hold the data.
|
|
||||||
# Doing this is useful when modifying and saving files, where we don't
|
|
||||||
# need to construct a parser.
|
|
||||||
if os.path.exists(fname):
|
|
||||||
try:
|
|
||||||
parser.read(fname)
|
|
||||||
except UnicodeDecodeError:
|
|
||||||
# See https://github.com/pypa/pip/issues/4963
|
|
||||||
raise ConfigurationFileCouldNotBeLoaded(
|
|
||||||
reason="contains invalid {} characters".format(
|
|
||||||
locale.getpreferredencoding(False)
|
|
||||||
),
|
|
||||||
fname=fname,
|
|
||||||
)
|
|
||||||
except configparser.Error as error:
|
|
||||||
# See https://github.com/pypa/pip/issues/4893
|
|
||||||
raise ConfigurationFileCouldNotBeLoaded(error=error)
|
|
||||||
return parser
|
|
||||||
|
|
||||||
def _load_environment_vars(self):
|
|
||||||
# type: () -> None
|
|
||||||
"""Loads configuration from environment variables
|
|
||||||
"""
|
|
||||||
self._config[kinds.ENV_VAR].update(
|
|
||||||
self._normalized_keys(":env:", self._get_environ_vars())
|
|
||||||
)
|
|
||||||
|
|
||||||
def _normalized_keys(self, section, items):
|
|
||||||
# type: (str, Iterable[Tuple[str, Any]]) -> Dict[str, Any]
|
|
||||||
"""Normalizes items to construct a dictionary with normalized keys.
|
|
||||||
|
|
||||||
This routine is where the names become keys and are made the same
|
|
||||||
regardless of source - configuration files or environment.
|
|
||||||
"""
|
|
||||||
normalized = {}
|
|
||||||
for name, val in items:
|
|
||||||
key = section + "." + _normalize_name(name)
|
|
||||||
normalized[key] = val
|
|
||||||
return normalized
|
|
||||||
|
|
||||||
def _get_environ_vars(self):
|
|
||||||
# type: () -> Iterable[Tuple[str, str]]
|
|
||||||
"""Returns a generator with all environmental vars with prefix PIP_"""
|
|
||||||
for key, val in os.environ.items():
|
|
||||||
should_be_yielded = (
|
|
||||||
key.startswith("PIP_") and
|
|
||||||
key[4:].lower() not in self._ignore_env_names
|
|
||||||
)
|
|
||||||
if should_be_yielded:
|
|
||||||
yield key[4:].lower(), val
|
|
||||||
|
|
||||||
# XXX: This is patched in the tests.
|
|
||||||
def _iter_config_files(self):
|
|
||||||
# type: () -> Iterable[Tuple[Kind, List[str]]]
|
|
||||||
"""Yields variant and configuration files associated with it.
|
|
||||||
|
|
||||||
This should be treated like items of a dictionary.
|
|
||||||
"""
|
|
||||||
# SMELL: Move the conditions out of this function
|
|
||||||
|
|
||||||
# environment variables have the lowest priority
|
|
||||||
config_file = os.environ.get('PIP_CONFIG_FILE', None)
|
|
||||||
if config_file is not None:
|
|
||||||
yield kinds.ENV, [config_file]
|
|
||||||
else:
|
|
||||||
yield kinds.ENV, []
|
|
||||||
|
|
||||||
# at the base we have any global configuration
|
|
||||||
yield kinds.GLOBAL, list(site_config_files)
|
|
||||||
|
|
||||||
# per-user configuration next
|
|
||||||
should_load_user_config = not self.isolated and not (
|
|
||||||
config_file and os.path.exists(config_file)
|
|
||||||
)
|
|
||||||
if should_load_user_config:
|
|
||||||
# The legacy config file is overridden by the new config file
|
|
||||||
yield kinds.USER, [legacy_config_file, new_config_file]
|
|
||||||
|
|
||||||
# finally virtualenv configuration first trumping others
|
|
||||||
if running_under_virtualenv():
|
|
||||||
yield kinds.VENV, [venv_config_file]
|
|
||||||
|
|
||||||
def _get_parser_to_modify(self):
|
|
||||||
# type: () -> Tuple[str, RawConfigParser]
|
|
||||||
# Determine which parser to modify
|
|
||||||
parsers = self._parsers[self.load_only]
|
|
||||||
if not parsers:
|
|
||||||
# This should not happen if everything works correctly.
|
|
||||||
raise ConfigurationError(
|
|
||||||
"Fatal Internal error [id=2]. Please report as a bug."
|
|
||||||
)
|
|
||||||
|
|
||||||
# Use the highest priority parser.
|
|
||||||
return parsers[-1]
|
|
||||||
|
|
||||||
# XXX: This is patched in the tests.
|
|
||||||
def _mark_as_modified(self, fname, parser):
|
|
||||||
# type: (str, RawConfigParser) -> None
|
|
||||||
file_parser_tuple = (fname, parser)
|
|
||||||
if file_parser_tuple not in self._modified_parsers:
|
|
||||||
self._modified_parsers.append(file_parser_tuple)
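
Two details from the file above are worth illustrating. First, key normalization: 'PIP_INDEX_URL' from the environment and 'index_url' from a file both collapse to the same key under their section. Second, the override order: because _dictionary applies dict.update() per variant in order, later variants win. A standalone sketch with invented values:

    def _normalize_name(name):
        name = name.lower().replace('_', '-')
        if name.startswith('--'):
            name = name[2:]  # only prefer long opts
        return name

    print(_normalize_name('INDEX_URL'))  # index-url
    print(_normalize_name('--timeout'))  # timeout

    override_order = ['global', 'user', 'venv', 'env', 'env-var']
    config = {
        'global': {'global.index-url': 'https://pypi.org/simple'},
        'user': {'global.index-url': 'https://mirror.example/simple'},
        'venv': {}, 'env': {},
        'env-var': {'global.timeout': '60'},
    }
    effective = {}
    for variant in override_order:
        effective.update(config[variant])
    print(effective)
    # the user value for index-url wins over the global one
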
@ -1,971 +0,0 @@
from __future__ import absolute_import

import cgi
import email.utils
import getpass
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys

from pip._vendor import requests, six, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.requests.utils import get_netrc_auth
# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
# why we ignore the type on this import
from pip._vendor.six.moves import xmlrpc_client  # type: ignore
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._vendor.urllib3.util import IS_PYOPENSSL

import pip
from pip._internal.exceptions import HashMismatch, InstallationError
from pip._internal.locations import write_delete_marker_file
from pip._internal.models.index import PyPI
from pip._internal.utils.encoding import auto_decode
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.glibc import libc_ver
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    ARCHIVE_EXTENSIONS, ask_path_exists, backup_dir, call_subprocess, consume,
    display_path, format_size, get_installed_version, rmtree,
    split_auth_from_netloc, splitext, unpack_file,
)
from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import DownloadProgressProvider
from pip._internal.vcs import vcs

if MYPY_CHECK_RUNNING:
    from typing import (  # noqa: F401
        Optional, Tuple, Dict, IO, Text, Union
    )
    from pip._internal.models.link import Link  # noqa: F401
    from pip._internal.utils.hashes import Hashes  # noqa: F401
    from pip._internal.vcs import AuthInfo  # noqa: F401

try:
    import ssl  # noqa
except ImportError:
    ssl = None

HAS_TLS = (ssl is not None) or IS_PYOPENSSL

__all__ = ['get_file_content',
           'is_url', 'url_to_path', 'path_to_url',
           'is_archive_file', 'unpack_vcs_link',
           'unpack_file_url', 'is_vcs_url', 'is_file_url',
           'unpack_http_url', 'unpack_url']


logger = logging.getLogger(__name__)


def user_agent():
    """
    Return a string representing the user agent.
    """
    data = {
        "installer": {"name": "pip", "version": pip.__version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }

    if data["implementation"]["name"] == 'CPython':
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'PyPy':
        if sys.pypy_version_info.releaselevel == 'final':
            pypy_version_info = sys.pypy_version_info[:3]
        else:
            pypy_version_info = sys.pypy_version_info
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info]
        )
    elif data["implementation"]["name"] == 'Jython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == 'IronPython':
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro
        distro_infos = dict(filter(
            lambda x: x[1],
            zip(["name", "version", "id"], distro.linux_distribution()),
        ))
        libc = dict(filter(
            lambda x: x[1],
            zip(["lib", "version"], libc_ver()),
        ))
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()

    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()

    if platform.machine():
        data["cpu"] = platform.machine()

    if HAS_TLS:
        data["openssl_version"] = ssl.OPENSSL_VERSION

    setuptools_version = get_installed_version("setuptools")
    if setuptools_version is not None:
        data["setuptools_version"] = setuptools_version

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )
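
# For reference, the returned string looks like this (sample values below
# are invented, not computed; the real output embeds the full JSON-encoded
# data dict):
#
#   pip/19.0.3 {"cpu":"x86_64","implementation":{"name":"CPython",
#   "version":"3.7.2"},"installer":{"name":"pip","version":"19.0.3"},
#   "python":"3.7.2","system":{"name":"Linux","release":"4.15.0"}}
#
# i.e. "pip/<version>" followed by the whole data dict as compact JSON.
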
class MultiDomainBasicAuth(AuthBase):

    def __init__(self, prompting=True):
        # type: (bool) -> None
        self.prompting = prompting
        self.passwords = {}  # type: Dict[str, AuthInfo]

    def __call__(self, req):
        parsed = urllib_parse.urlparse(req.url)

        # Split the credentials from the netloc.
        netloc, url_user_password = split_auth_from_netloc(parsed.netloc)

        # Set the url of the request to the url without any credentials
        req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])

        # Use any stored credentials that we have for this netloc
        username, password = self.passwords.get(netloc, (None, None))

        # Use the credentials embedded in the url if we have none stored
        if username is None:
            username, password = url_user_password

        # Get creds from netrc if we still don't have them
        if username is None and password is None:
            netrc_auth = get_netrc_auth(req.url)
            username, password = netrc_auth if netrc_auth else (None, None)

        if username or password:
            # Store the username and password
            self.passwords[netloc] = (username, password)

            # Send the basic auth with this request
            req = HTTPBasicAuth(username or "", password or "")(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    def handle_401(self, resp, **kwargs):
        # We only care about 401 responses, anything else we want to just
        # pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib_parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        username = six.moves.input("User for %s: " % parsed.netloc)
        password = getpass.getpass("Password: ")

        # Store the new username and password to use for future requests
        if username or password:
            self.passwords[parsed.netloc] = (username, password)

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)
        req.register_hook("response", self.warn_on_401)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp

    def warn_on_401(self, resp, **kwargs):
        # warn user that they provided incorrect credentials
        if resp.status_code == 401:
            logger.warning('401 Error, Credentials not correct for %s',
                           resp.request.url)
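
# The credential resolution order implemented above, in short:
#   1. credentials already stored for this netloc,
#   2. credentials embedded in the URL itself,
#   3. a matching ~/.netrc entry,
# and on a 401 (when prompting is enabled) the user is asked interactively
# and the answer is cached for subsequent requests to the same netloc.
#
# The first step of __call__, sketched with the standard library instead of
# pip's split_auth_from_netloc helper (the URL below is made up):
#
#     from urllib.parse import urlparse, urlunparse
#     parsed = urlparse('https://user:secret@pypi.example.org/simple/')
#     auth, _, host = parsed.netloc.rpartition('@')
#     username, _, password = auth.partition(':')
#     print(username, password)  # user secret
#     print(urlunparse(parsed[:1] + (host,) + parsed[2:]))
#     # https://pypi.example.org/simple/
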
class LocalFSAdapter(BaseAdapter):

    def send(self, request, stream=None, timeout=None, verify=None, cert=None,
             proxies=None):
        pathname = url_to_path(request.url)

        resp = Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            resp.status_code = 404
            resp.raw = exc
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = CaseInsensitiveDict({
                "Content-Type": content_type,
                "Content-Length": stats.st_size,
                "Last-Modified": modified,
            })

            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self):
        pass


class SafeFileCache(FileCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, *args, **kwargs):
        super(SafeFileCache, self).__init__(*args, **kwargs)

        # Check to ensure that the directory containing our cache directory
        # is owned by the user currently executing pip. If it does not exist
        # we will check the parent directory until we find one that does exist.
        # If it is not owned by the user executing pip then we will disable
        # the cache and log a warning.
        if not check_path_owner(self.directory):
            logger.warning(
                "The directory '%s' or its parent directory is not owned by "
                "the current user and the cache has been disabled. Please "
                "check the permissions and owner of that directory. If "
                "executing pip with sudo, you may want sudo's -H flag.",
                self.directory,
            )

            # Set our directory to None to disable the Cache
            self.directory = None

    def get(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).get(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error; if we can't access the cache
            # then we can just skip caching and process the request as if
            # caching wasn't enabled.
            pass

    def set(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).set(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error; if we can't access the cache
            # then we can just skip caching and process the request as if
            # caching wasn't enabled.
            pass

    def delete(self, *args, **kwargs):
        # If we don't have a directory, then the cache should be a no-op.
        if self.directory is None:
            return

        try:
            return super(SafeFileCache, self).delete(*args, **kwargs)
        except (LockError, OSError, IOError):
            # We intentionally silence this error; if we can't access the cache
            # then we can just skip caching and process the request as if
            # caching wasn't enabled.
            pass


class InsecureHTTPAdapter(HTTPAdapter):

    def cert_verify(self, conn, url, verify, cert):
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None


class PipSession(requests.Session):

    timeout = None  # type: Optional[int]

    def __init__(self, *args, **kwargs):
        retries = kwargs.pop("retries", 0)
        cache = kwargs.pop("cache", None)
        insecure_hosts = kwargs.pop("insecure_hosts", [])

        super(PipSession, self).__init__(*args, **kwargs)

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth()

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,

            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            # A 500 may indicate transient error in Amazon S3
            # A 520 or 527 - may indicate transient error in CloudFlare
            status_forcelist=[500, 503, 520, 527],

            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )

        # We want to _only_ cache responses on securely fetched origins. We do
        # this because we can't validate the response of an insecurely fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache, use_dir_lock=True),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching (see above) so we'll use it for all http:// URLs as
        # well as any https:// host that we've marked as ignoring TLS errors
        # for.
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        # We want to use a non-validating adapter for any requests which are
        # deemed insecure.
        for host in insecure_hosts:
            self.mount("https://{}/".format(host), insecure_adapter)

    def request(self, method, url, *args, **kwargs):
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super(PipSession, self).request(method, url, *args, **kwargs)
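
# A hedged usage sketch; the cache path and host below are invented, and the
# import path matches this pip vintage:
#
#     from pip._internal.download import PipSession
#
#     session = PipSession(retries=3, cache='/tmp/pip-http-cache',
#                          insecure_hosts=['internal.example.org'])
#     session.timeout = 15          # default applied by request() above
#     resp = session.get('https://pypi.org/simple/')
#
# https:// traffic goes through the caching, retrying adapter; http:// URLs
# and the listed insecure hosts skip certificate verification; file:// URLs
# are served by LocalFSAdapter.
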
def get_file_content(url, comes_from=None, session=None):
    # type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text]
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL. Returns (location, content). Content is unicode.

    :param url: File path or url.
    :param comes_from: Origin description of requirements.
    :param session: Instance of pip.download.PipSession.
    """
    if session is None:
        raise TypeError(
            "get_file_content() missing 1 required keyword argument: 'session'"
        )

    match = _scheme_re.search(url)
    if match:
        scheme = match.group(1).lower()
        if (scheme == 'file' and comes_from and
                comes_from.startswith('http')):
            raise InstallationError(
                'Requirements file %s references URL %s, which is local'
                % (comes_from, url))
        if scheme == 'file':
            path = url.split(':', 1)[1]
            path = path.replace('\\', '/')
            match = _url_slash_drive_re.match(path)
            if match:
                path = match.group(1) + ':' + path.split('|', 1)[1]
            path = urllib_parse.unquote(path)
            if path.startswith('/'):
                path = '/' + path.lstrip('/')
            url = path
        else:
            # FIXME: catch some errors
            resp = session.get(url)
            resp.raise_for_status()
            return resp.url, resp.text
    try:
        with open(url, 'rb') as f:
            content = auto_decode(f.read())
    except IOError as exc:
        raise InstallationError(
            'Could not open requirements file: %s' % str(exc)
        )
    return url, content


_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)


def is_url(name):
    # type: (Union[str, Text]) -> bool
    """Returns true if the name looks like a URL"""
    if ':' not in name:
        return False
    scheme = name.split(':', 1)[0].lower()
    return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes


def url_to_path(url):
    # type: (str) -> str
    """
    Convert a file: URL to a path.
    """
    assert url.startswith('file:'), (
        "You can only turn file: urls into filenames (not %r)" % url)

    _, netloc, path, _, _ = urllib_parse.urlsplit(url)

    # if we have a UNC path, prepend UNC share notation
    if netloc:
        netloc = '\\\\' + netloc

    path = urllib_request.url2pathname(netloc + path)
    return path


def path_to_url(path):
    # type: (Union[str, Text]) -> str
    """
    Convert a path to a file: URL. The path will be made absolute and have
    quoted path parts.
    """
    path = os.path.normpath(os.path.abspath(path))
    url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
    return url
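
# Round-trip sketch for the two helpers above (POSIX paths; input invented):
#
#     url = path_to_url('/tmp/example dir/pkg.tar.gz')
#     # -> 'file:///tmp/example%20dir/pkg.tar.gz'
#     url_to_path(url)
#     # -> '/tmp/example dir/pkg.tar.gz'
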
def is_archive_file(name):
    # type: (str) -> bool
    """Return True if `name` is considered an archive file."""
    ext = splitext(name)[1].lower()
    if ext in ARCHIVE_EXTENSIONS:
        return True
    return False


def unpack_vcs_link(link, location):
    vcs_backend = _get_used_vcs_backend(link)
    vcs_backend.unpack(location)


def _get_used_vcs_backend(link):
    for backend in vcs.backends:
        if link.scheme in backend.schemes:
            vcs_backend = backend(link.url)
            return vcs_backend


def is_vcs_url(link):
    # type: (Link) -> bool
    return bool(_get_used_vcs_backend(link))


def is_file_url(link):
    # type: (Link) -> bool
    return link.url.lower().startswith('file:')


def is_dir_url(link):
    # type: (Link) -> bool
    """Return whether a file:// Link points to a directory.

    ``link`` must not have any other scheme but file://. Call is_file_url()
    first.

    """
    link_path = url_to_path(link.url_without_fragment)
    return os.path.isdir(link_path)


def _progress_indicator(iterable, *args, **kwargs):
    return iterable


def _download_url(
    resp,  # type: Response
    link,  # type: Link
    content_file,  # type: IO
    hashes,  # type: Hashes
    progress_bar  # type: str
):
    # type: (...) -> None
    try:
        total_length = int(resp.headers['content-length'])
    except (ValueError, KeyError, TypeError):
        total_length = 0

    cached_resp = getattr(resp, "from_cache", False)
    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif cached_resp:
        show_progress = False
    elif total_length > (40 * 1000):
        show_progress = True
    elif not total_length:
        show_progress = True
    else:
        show_progress = False

    show_url = link.show_url

    def resp_read(chunk_size):
        try:
            # Special case for urllib3.
            for chunk in resp.raw.stream(
                    chunk_size,
                    # We use decode_content=False here because we don't
                    # want urllib3 to mess with the raw bytes we get
                    # from the server. If we decompress inside of
                    # urllib3 then we cannot verify the checksum
                    # because the checksum will be of the compressed
                    # file. This breakage will only occur if the
                    # server adds a Content-Encoding header, which
                    # depends on how the server was configured:
                    # - Some servers will notice that the file isn't a
                    #   compressible file and will leave the file alone
                    #   and with an empty Content-Encoding
                    # - Some servers will notice that the file is
                    #   already compressed and will leave the file
                    #   alone and will add a Content-Encoding: gzip
                    #   header
                    # - Some servers won't notice anything at all and
                    #   will take a file that's already been compressed
                    #   and compress it again and set the
                    #   Content-Encoding: gzip header
                    #
                    # By setting this not to decode automatically we
                    # hope to eliminate problems with the second case.
                    decode_content=False):
                yield chunk
        except AttributeError:
            # Standard file-like object.
            while True:
                chunk = resp.raw.read(chunk_size)
                if not chunk:
                    break
                yield chunk

    def written_chunks(chunks):
        for chunk in chunks:
            content_file.write(chunk)
            yield chunk

    progress_indicator = _progress_indicator

    if link.netloc == PyPI.netloc:
        url = show_url
    else:
        url = link.url_without_fragment

    if show_progress:  # We don't show progress on cached responses
        progress_indicator = DownloadProgressProvider(progress_bar,
                                                      max=total_length)
        if total_length:
            logger.info("Downloading %s (%s)", url, format_size(total_length))
        else:
            logger.info("Downloading %s", url)
    elif cached_resp:
        logger.info("Using cached %s", url)
    else:
        logger.info("Downloading %s", url)

    logger.debug('Downloading from URL %s', link)

    downloaded_chunks = written_chunks(
        progress_indicator(
            resp_read(CONTENT_CHUNK_SIZE),
            CONTENT_CHUNK_SIZE
        )
    )
    if hashes:
        hashes.check_against_chunks(downloaded_chunks)
    else:
        consume(downloaded_chunks)
|
|
||||||
|
|
||||||
|
|
||||||
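

# The chunked-hash idea above, reduced to plain hashlib for illustration;
# `expected` is a hypothetical known-good sha256 hex digest:
#
#     import hashlib
#
#     def check_chunks_sha256(chunks, expected):
#         h = hashlib.sha256()
#         for chunk in chunks:      # consuming the generator also drives
#             h.update(chunk)       # written_chunks(), so the file is saved
#         if h.hexdigest() != expected:
#             raise ValueError('hash mismatch')
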
def _copy_file(filename, location, link):
    copy = True
    download_location = os.path.join(location, link.filename)
    if os.path.exists(download_location):
        response = ask_path_exists(
            'The file %s exists. (i)gnore, (w)ipe, (b)ackup, (a)bort' %
            display_path(download_location), ('i', 'w', 'b', 'a'))
        if response == 'i':
            copy = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(download_location))
            os.remove(download_location)
        elif response == 'b':
            dest_file = backup_dir(download_location)
            logger.warning(
                'Backing up %s to %s',
                display_path(download_location),
                display_path(dest_file),
            )
            shutil.move(download_location, dest_file)
        elif response == 'a':
            sys.exit(-1)
    if copy:
        shutil.copy(filename, download_location)
        logger.info('Saved %s', display_path(download_location))


def unpack_http_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    session=None,  # type: Optional[PipSession]
    hashes=None,  # type: Optional[Hashes]
    progress_bar="on"  # type: str
):
    # type: (...) -> None
    if session is None:
        raise TypeError(
            "unpack_http_url() missing 1 required keyword argument: 'session'"
        )

    with TempDirectory(kind="unpack") as temp_dir:
        # If a download dir is specified, is the file already downloaded there?
        already_downloaded_path = None
        if download_dir:
            already_downloaded_path = _check_download_dir(link,
                                                          download_dir,
                                                          hashes)

        if already_downloaded_path:
            from_path = already_downloaded_path
            content_type = mimetypes.guess_type(from_path)[0]
        else:
            # let's download to a tmp dir
            from_path, content_type = _download_http_url(link,
                                                         session,
                                                         temp_dir.path,
                                                         hashes,
                                                         progress_bar)

        # unpack the archive to the build dir location. even when only
        # downloading archives, they have to be unpacked to parse dependencies
        unpack_file(from_path, location, content_type, link)

        # a download dir is specified; let's copy the archive there
        if download_dir and not already_downloaded_path:
            _copy_file(from_path, download_dir, link)

        if not already_downloaded_path:
            os.unlink(from_path)


def unpack_file_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    hashes=None  # type: Optional[Hashes]
):
    # type: (...) -> None
    """Unpack link into location.

    If download_dir is provided and link points to a file, make a copy
    of the link file inside download_dir.
    """
    link_path = url_to_path(link.url_without_fragment)

    # If it's a url to a local directory
    if is_dir_url(link):
        if os.path.isdir(location):
            rmtree(location)
        shutil.copytree(link_path, location, symlinks=True)
        if download_dir:
            logger.info('Link is a directory, ignoring download_dir')
        return

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(link_path)

    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link,
                                                      download_dir,
                                                      hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link_path

    content_type = mimetypes.guess_type(from_path)[0]

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies
    unpack_file(from_path, location, content_type, link)

    # a download dir is specified and not already downloaded
    if download_dir and not already_downloaded_path:
        _copy_file(from_path, download_dir, link)


def _copy_dist_from_dir(link_path, location):
    """Copy distribution files in `link_path` to `location`.

    Invoked when user requests to install a local directory. E.g.:

        pip install .
        pip install ~/dev/git-repos/python-prompt-toolkit

    """

    # Note: This is currently VERY SLOW if you have a lot of data in the
    # directory, because it copies everything with `shutil.copytree`.
    # What it should really do is build an sdist and install that.
    # See https://github.com/pypa/pip/issues/2195

    if os.path.isdir(location):
        rmtree(location)

    # build an sdist
    setup_py = 'setup.py'
    sdist_args = [sys.executable]
    sdist_args.append('-c')
    sdist_args.append(SETUPTOOLS_SHIM % setup_py)
    sdist_args.append('sdist')
    sdist_args += ['--dist-dir', location]
    logger.info('Running setup.py sdist for %s', link_path)

    with indent_log():
        call_subprocess(sdist_args, cwd=link_path, show_stdout=False)

    # unpack sdist into `location`
    sdist = os.path.join(location, os.listdir(location)[0])
    logger.info('Unpacking sdist %s into %s', sdist, location)
    unpack_file(sdist, location, content_type=None, link=None)
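

# The subprocess assembled above is equivalent to running, inside link_path:
#
#     python -c "<SETUPTOOLS_SHIM % 'setup.py'>" sdist --dist-dir <location>
#
# after which `location` holds exactly one freshly built sdist archive.
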
class PipXmlrpcTransport(xmlrpc_client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """

    def __init__(self, index_url, session, use_datetime=False):
        xmlrpc_client.Transport.__init__(self, use_datetime)
        index_parts = urllib_parse.urlparse(index_url)
        self._scheme = index_parts.scheme
        self._session = session

    def request(self, host, handler, request_body, verbose=False):
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib_parse.urlunparse(parts)
        try:
            headers = {'Content-Type': 'text/xml'}
            response = self._session.post(url, data=request_body,
                                          headers=headers, stream=True)
            response.raise_for_status()
            self.verbose = verbose
            return self.parse_response(response.raw)
        except requests.HTTPError as exc:
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code, url,
            )
            raise
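

# Usage sketch, much as pip's `search` command wires it up; the index URL is
# an example value:
#
#     transport = PipXmlrpcTransport('https://pypi.org/pypi', session)
#     pypi = xmlrpc_client.ServerProxy('https://pypi.org/pypi',
#                                      transport=transport)
#     hits = pypi.search({'name': 'pip'})
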
def unpack_url(
    link,  # type: Optional[Link]
    location,  # type: Optional[str]
    download_dir=None,  # type: Optional[str]
    only_download=False,  # type: bool
    session=None,  # type: Optional[PipSession]
    hashes=None,  # type: Optional[Hashes]
    progress_bar="on"  # type: str
):
    # type: (...) -> None
    """Unpack link.
    If link is a VCS link:
      if only_download, export into download_dir and ignore location
      else unpack into location
    for other types of link:
      - unpack into location
      - if download_dir, copy the file into download_dir
      - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        if session is None:
            session = PipSession()

        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes,
            progress_bar=progress_bar
        )
    if only_download:
        write_delete_marker_file(location)
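

# Dispatch behavior of unpack_url, sketched with hypothetical links:
#
#     unpack_url(Link('git+https://example.com/repo.git'), loc)   # VCS export
#     unpack_url(Link('file:///tmp/demo-1.0.tar.gz'), loc)        # local file
#     unpack_url(Link('https://example.com/demo-1.0.tar.gz'), loc,
#                session=PipSession())                            # HTTP fetch
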
def _download_http_url(
    link,  # type: Link
    session,  # type: PipSession
    temp_dir,  # type: str
    hashes,  # type: Hashes
    progress_bar  # type: str
):
    # type: (...) -> Tuple[str, str]
    """Download link url into temp_dir using provided session"""
    target_url = link.url.split('#', 1)[0]
    try:
        resp = session.get(
            target_url,
            # We use Accept-Encoding: identity here because requests
            # defaults to accepting compressed responses. This breaks in
            # a variety of ways depending on how the server is configured.
            # - Some servers will notice that the file isn't a compressible
            #   file and will leave the file alone and with an empty
            #   Content-Encoding
            # - Some servers will notice that the file is already
            #   compressed and will leave the file alone and will add a
            #   Content-Encoding: gzip header
            # - Some servers won't notice anything at all and will take
            #   a file that's already been compressed and compress it again
            #   and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding we're
            # hoping to eliminate the third case. Hopefully there does not
            # exist a server which when given a file will notice it is
            # already compressed and that you're not asking for a
            # compressed file and will then decompress it before sending
            # because if that's the case I don't think it'll ever be
            # possible to make this work.
            headers={"Accept-Encoding": "identity"},
            stream=True,
        )
        resp.raise_for_status()
    except requests.HTTPError as exc:
        logger.critical(
            "HTTP error %s while getting %s", exc.response.status_code, link,
        )
        raise

    content_type = resp.headers.get('content-type', '')
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get('content-disposition')
    if content_disposition:
        type, params = cgi.parse_header(content_disposition)
        # We use ``or`` here because we don't want to use an "empty" value
        # from the filename param.
        filename = params.get('filename') or filename
    ext = splitext(filename)[1]
    if not ext:
        ext = mimetypes.guess_extension(content_type)
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    file_path = os.path.join(temp_dir, filename)
    with open(file_path, 'wb') as content_file:
        _download_url(resp, link, content_file, hashes, progress_bar)
    return file_path, content_type
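

# How the Content-Disposition parsing above behaves, for a made-up header:
#
#     >>> import cgi
#     >>> cgi.parse_header('attachment; filename="demo-1.0.tar.gz"')
#     ('attachment', {'filename': 'demo-1.0.tar.gz'})
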
def _check_download_dir(link, download_dir, hashes):
    # type: (Link, str, Hashes) -> Optional[str]
    """Check download_dir for a previously downloaded file with correct hash.

    If a correct file is found, return its path; otherwise return None.
    """
    download_path = os.path.join(download_dir, link.filename)
    if os.path.exists(download_path):
        # If already downloaded, does its hash match?
        logger.info('File was already downloaded %s', download_path)
        if hashes:
            try:
                hashes.check_against_path(download_path)
            except HashMismatch:
                logger.warning(
                    'Previously-downloaded file %s has bad hash. '
                    'Re-downloading.',
                    download_path
                )
                os.unlink(download_path)
                return None
        return download_path
    return None
@ -1,274 +0,0 @@
"""Exceptions used throughout package"""
from __future__ import absolute_import

from itertools import chain, groupby, repeat

from pip._vendor.six import iteritems

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional  # noqa: F401
    from pip._internal.req.req_install import InstallRequirement  # noqa: F401


class PipError(Exception):
    """Base pip exception"""


class ConfigurationError(PipError):
    """General exception in configuration"""


class InstallationError(PipError):
    """General exception during installation"""


class UninstallationError(PipError):
    """General exception during uninstallation"""


class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement"""


class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""


class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""


class BadCommand(PipError):
    """Raised when virtualenv or a command is not found"""


class CommandError(PipError):
    """Raised when there is an error in command-line arguments"""


class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory"""


class InvalidWheelFilename(InstallationError):
    """Invalid wheel filename."""


class UnsupportedWheel(InstallationError):
    """Unsupported wheel."""


class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting"""

    def __init__(self):
        self.errors = []

    def append(self, error):
        self.errors.append(error)

    def __str__(self):
        lines = []
        self.errors.sort(key=lambda e: e.order)
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        if lines:
            return '\n'.join(lines)

    def __nonzero__(self):
        return bool(self.errors)

    def __bool__(self):
        return self.__nonzero__()
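

# Typical accumulation pattern for the class above; `requirements` and
# `check_one()` are hypothetical:
#
#     errors = HashErrors()
#     for req in requirements:
#         try:
#             check_one(req)
#         except HashError as exc:
#             exc.req = req
#             errors.append(exc)
#     if errors:        # __bool__: any errors collected?
#         raise errors
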
class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """
    req = None  # type: Optional[InstallRequirement]
    head = ''

    def body(self):
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            populate_link() having already been called

        """
        return '    %s' % self._requirement_name()

    def __str__(self):
        return '%s\n%s' % (self.head, self.body())

    def _requirement_name(self):
        """Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        """
        return str(self.req) if self.req else 'unknown package'


class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    order = 0
    head = ("Can't verify hashes for these requirements because we don't "
            "have a way to hash version control repositories:")


class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a file:// requirement that points to a
    directory, but we don't have a method for hashing those."""

    order = 1
    head = ("Can't verify hashes for these file:// requirements because they "
            "point to directories:")


class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = ('Hashes are required in --require-hashes mode, but they are '
            'missing from some requirements. Here is a list of those '
            'requirements along with the hashes their downloaded archives '
            'actually had. Add lines like these to your requirements files to '
            'prevent tampering. (If you did not enable --require-hashes '
            'manually, note that it turns on automatically when any package '
            'has a hash.)')

    def __init__(self, gotten_hash):
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self):
        # Dodge circular import.
        from pip._internal.utils.hashes import FAVORITE_HASH

        package = None
        if self.req:
            # In the case of URL-based requirements, display the original URL
            # seen in the requirements file rather than the package name,
            # so the output can be directly copied into the requirements file.
            package = (self.req.original_link if self.req.original_link
                       # In case someone feeds something downright stupid
                       # to InstallRequirement's constructor.
                       else getattr(self.req, 'req', None))
        return '    %s --hash=%s:%s' % (package or 'unknown package',
                                        FAVORITE_HASH,
                                        self.gotten_hash)


class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""

    order = 3
    head = ('In --require-hashes mode, all requirements must have their '
            'versions pinned with ==. These do not:')


class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raised to
        improve its error message.

    """
    order = 4
    head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS '
            'FILE. If you have updated the package versions, please update '
            'the hashes. Otherwise, examine the package contents carefully; '
            'someone may have tampered with them.')

    def __init__(self, allowed, gots):
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self):
        return '    %s:\n%s' % (self._requirement_name(),
                                self._hash_comparison())

    def _hash_comparison(self):
        """
        Return a comparison of actual and expected hash values.

        Example::

               Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
                            or 123451234512345123451234512345123451234512345
                    Got        bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """
        def hash_then_or(hash_name):
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat('    or'))

        lines = []
        for hash_name, expecteds in iteritems(self.allowed):
            prefix = hash_then_or(hash_name)
            lines.extend(('        Expected %s %s' % (next(prefix), e))
                         for e in expecteds)
            lines.append('             Got        %s\n' %
                         self.gots[hash_name].hexdigest())
            prefix = '    or'
        return '\n'.join(lines)
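

# Rendering sketch for HashMismatch; the digests are dummy values:
#
#     >>> import hashlib
#     >>> err = HashMismatch({'sha256': ['a' * 64]},
#     ...                    {'sha256': hashlib.sha256(b'tampered')})
#     >>> print(err.body())
#         unknown package:
#             Expected sha256 aaaa...a
#                  Got        <sha256 hexdigest of b'tampered'>
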
class UnsupportedPythonVersion(InstallationError):
    """Unsupported python version according to Requires-Python package
    metadata."""


class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
    """When there are errors while loading a configuration file
    """

    def __init__(self, reason="could not be loaded", fname=None, error=None):
        super(ConfigurationFileCouldNotBeLoaded, self).__init__(error)
        self.reason = reason
        self.fname = fname
        self.error = error

    def __str__(self):
        if self.fname is not None:
            message_part = " in {}.".format(self.fname)
        else:
            assert self.error is not None
            message_part = ".\n{}\n".format(self.error.message)
        return "Configuration file {}{}".format(self.reason, message_part)
@ -1,990 +0,0 @@
"""Routines related to PyPI, indexes"""
from __future__ import absolute_import

import cgi
import itertools
import logging
import mimetypes
import os
import posixpath
import re
import sys
from collections import namedtuple

from pip._vendor import html5lib, requests, six
from pip._vendor.distlib.compat import unescape
from pip._vendor.packaging import specifiers
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.requests.exceptions import RetryError, SSLError
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request

from pip._internal.download import HAS_TLS, is_url, path_to_url, url_to_path
from pip._internal.exceptions import (
    BestVersionAlreadyInstalled, DistributionNotFound, InvalidWheelFilename,
    UnsupportedWheel,
)
from pip._internal.models.candidate import InstallationCandidate
from pip._internal.models.format_control import FormatControl
from pip._internal.models.index import PyPI
from pip._internal.models.link import Link
from pip._internal.pep425tags import get_supported
from pip._internal.utils.compat import ipaddress
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, WHEEL_EXTENSION, normalize_path,
    redact_password_from_url,
)
from pip._internal.utils.packaging import check_requires_python
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.wheel import Wheel

if MYPY_CHECK_RUNNING:
    from logging import Logger  # noqa: F401
    from typing import (  # noqa: F401
        Tuple, Optional, Any, List, Union, Callable, Set, Sequence,
        Iterable, MutableMapping
    )
    from pip._vendor.packaging.version import _BaseVersion  # noqa: F401
    from pip._vendor.requests import Response  # noqa: F401
    from pip._internal.req import InstallRequirement  # noqa: F401
    from pip._internal.download import PipSession  # noqa: F401

    SecureOrigin = Tuple[str, str, Optional[str]]
    BuildTag = Tuple[Any, ...]  # either empty tuple or Tuple[int, str]
    CandidateSortingKey = Tuple[int, _BaseVersion, BuildTag, Optional[int]]

__all__ = ['FormatControl', 'PackageFinder']


SECURE_ORIGINS = [
    # protocol, hostname, port
    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
    ("https", "*", "*"),
    ("*", "localhost", "*"),
    ("*", "127.0.0.0/8", "*"),
    ("*", "::1/128", "*"),
    ("file", "*", None),
    # ssh is always secure.
    ("ssh", "*", "*"),
]  # type: List[SecureOrigin]


logger = logging.getLogger(__name__)
def _match_vcs_scheme(url):
    # type: (str) -> Optional[str]
    """Look for VCS schemes in the URL.

    Returns the matched VCS scheme, or None if there's no match.
    """
    from pip._internal.vcs import VcsSupport
    for scheme in VcsSupport.schemes:
        if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
            return scheme
    return None
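

# Example matches, assuming VcsSupport.schemes covers the usual git/hg/svn/bzr
# scheme variants:
#
#     _match_vcs_scheme('git+https://github.com/pypa/pip.git')   # -> 'git'
#     _match_vcs_scheme('https://pypi.org/simple/pip/')          # -> None
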
def _is_url_like_archive(url):
    # type: (str) -> bool
    """Return whether the URL looks like an archive.
    """
    filename = Link(url).filename
    for bad_ext in ARCHIVE_EXTENSIONS:
        if filename.endswith(bad_ext):
            return True
    return False


class _NotHTML(Exception):
    def __init__(self, content_type, request_desc):
        # type: (str, str) -> None
        super(_NotHTML, self).__init__(content_type, request_desc)
        self.content_type = content_type
        self.request_desc = request_desc


def _ensure_html_header(response):
    # type: (Response) -> None
    """Check the Content-Type header to ensure the response contains HTML.

    Raises `_NotHTML` if the content type is not text/html.
    """
    content_type = response.headers.get("Content-Type", "")
    if not content_type.lower().startswith("text/html"):
        raise _NotHTML(content_type, response.request.method)


class _NotHTTP(Exception):
    pass


def _ensure_html_response(url, session):
    # type: (str, PipSession) -> None
    """Send a HEAD request to the URL, and ensure the response contains HTML.

    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
    `_NotHTML` if the content type is not text/html.
    """
    scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
    if scheme not in {'http', 'https'}:
        raise _NotHTTP()

    resp = session.head(url, allow_redirects=True)
    resp.raise_for_status()

    _ensure_html_header(resp)


def _get_html_response(url, session):
    # type: (str, PipSession) -> Response
    """Access an HTML page with GET, and return the response.

    This consists of three parts:

    1. If the URL looks suspiciously like an archive, send a HEAD first to
       check the Content-Type is HTML, to avoid downloading a large file.
       Raise `_NotHTTP` if the content type cannot be determined, or
       `_NotHTML` if it is not HTML.
    2. Actually perform the request. Raise HTTP exceptions on network failures.
    3. Check the Content-Type header to make sure we got HTML, and raise
       `_NotHTML` otherwise.
    """
    if _is_url_like_archive(url):
        _ensure_html_response(url, session=session)

    logger.debug('Getting page %s', url)

    resp = session.get(
        url,
        headers={
            "Accept": "text/html",
            # We don't want to blindly return cached data for
            # /simple/, because authors are generally expecting that
            # twine upload && pip install will function, but if
            # they've done a pip install in the last ~10 minutes
            # it won't. Thus by setting this to zero we will not
            # blindly use any cached data, however the benefit of
            # using max-age=0 instead of no-cache, is that we will
            # still support conditional requests, so we will still
            # minimize traffic sent in cases where the page hasn't
            # changed at all, we will just always incur the round
            # trip for the conditional GET now instead of only
            # once per 10 minutes.
            # For more information, please see pypa/pip#5670.
            "Cache-Control": "max-age=0",
        },
    )
    resp.raise_for_status()

    # The check for archives above only works if the url ends with
    # something that looks like an archive. However that is not a
    # requirement of a url. Unless we issue a HEAD request on every
    # url we cannot know ahead of time for sure if something is HTML
    # or not. However we can check after we've downloaded it.
    _ensure_html_header(resp)

    return resp
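

# The request above, reduced to plain `requests` for illustration; the index
# URL is an example value:
#
#     import requests
#     resp = requests.get(
#         'https://pypi.org/simple/pip/',
#         headers={"Accept": "text/html", "Cache-Control": "max-age=0"},
#     )
#     resp.raise_for_status()
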
def _handle_get_page_fail(
    link,  # type: Link
    reason,  # type: Union[str, Exception]
    meth=None  # type: Optional[Callable[..., None]]
):
    # type: (...) -> None
    if meth is None:
        meth = logger.debug
    meth("Could not fetch URL %s: %s - skipping", link, reason)


def _get_html_page(link, session=None):
    # type: (Link, Optional[PipSession]) -> Optional[HTMLPage]
    if session is None:
        raise TypeError(
            "_get_html_page() missing 1 required keyword argument: 'session'"
        )

    url = link.url.split('#', 1)[0]

    # Check for VCS schemes that do not support lookup as web pages.
    vcs_scheme = _match_vcs_scheme(url)
    if vcs_scheme:
        logger.debug('Cannot look at %s URL %s', vcs_scheme, link)
        return None

    # Tack index.html onto file:// URLs that point to directories
    scheme, _, path, _, _, _ = urllib_parse.urlparse(url)
    if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))):
        # add trailing slash if not present so urljoin doesn't trim
        # final segment
        if not url.endswith('/'):
            url += '/'
        url = urllib_parse.urljoin(url, 'index.html')
        logger.debug(' file: URL is directory, getting %s', url)

    try:
        resp = _get_html_response(url, session=session)
    except _NotHTTP as exc:
        logger.debug(
            'Skipping page %s because it looks like an archive, and cannot '
            'be checked by HEAD.', link,
        )
    except _NotHTML as exc:
        logger.debug(
            'Skipping page %s because the %s request got Content-Type: %s',
            link, exc.request_desc, exc.content_type,
        )
    except requests.HTTPError as exc:
        _handle_get_page_fail(link, exc)
    except RetryError as exc:
        _handle_get_page_fail(link, exc)
    except SSLError as exc:
        reason = "There was a problem confirming the ssl certificate: "
        reason += str(exc)
        _handle_get_page_fail(link, reason, meth=logger.info)
    except requests.ConnectionError as exc:
        _handle_get_page_fail(link, "connection error: %s" % exc)
    except requests.Timeout:
        _handle_get_page_fail(link, "timed out")
    else:
        return HTMLPage(resp.content, resp.url, resp.headers)
    return None
class PackageFinder(object):
    """This finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.
    """

    def __init__(
        self,
        find_links,  # type: List[str]
        index_urls,  # type: List[str]
        allow_all_prereleases=False,  # type: bool
        trusted_hosts=None,  # type: Optional[Iterable[str]]
        session=None,  # type: Optional[PipSession]
        format_control=None,  # type: Optional[FormatControl]
        platform=None,  # type: Optional[str]
        versions=None,  # type: Optional[List[str]]
        abi=None,  # type: Optional[str]
        implementation=None,  # type: Optional[str]
        prefer_binary=False  # type: bool
    ):
        # type: (...) -> None
        """Create a PackageFinder.

        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param platform: A string or None. If None, searches for packages
            that are supported by the current system. Otherwise, will find
            packages that can be built on the platform passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param versions: A list of strings or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param abi: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        :param implementation: A string or None. This is passed directly
            to pep425tags.py in the get_supported() method.
        """
        if session is None:
            raise TypeError(
                "PackageFinder() missing 1 required keyword argument: "
                "'session'"
            )

        # Build find_links. If an argument starts with ~, it may be
        # a local file relative to a home directory. So try normalizing
        # it and if it exists, use the normalized version.
        # This is deliberately conservative - it might be fine just to
        # blindly normalize anything starting with a ~...
        self.find_links = []  # type: List[str]
        for link in find_links:
            if link.startswith('~'):
                new_link = normalize_path(link)
                if os.path.exists(new_link):
                    link = new_link
            self.find_links.append(link)

        self.index_urls = index_urls

        # These are boring links that have already been logged somehow:
        self.logged_links = set()  # type: Set[Link]

        self.format_control = format_control or FormatControl(set(), set())

        # Domains that we won't emit warnings for when not using HTTPS
        self.secure_origins = [
            ("*", host, "*")
            for host in (trusted_hosts if trusted_hosts else [])
        ]  # type: List[SecureOrigin]

        # Do we want to allow _all_ pre-releases?
        self.allow_all_prereleases = allow_all_prereleases

        # The Session we'll use to make requests
        self.session = session

        # The valid tags to check potential found wheel candidates against
        self.valid_tags = get_supported(
            versions=versions,
            platform=platform,
            abi=abi,
            impl=implementation,
        )

        # Do we prefer old, but valid, binary dist over new source dist
        self.prefer_binary = prefer_binary

        # If we don't have TLS enabled, then WARN if anyplace we're looking
        # relies on TLS.
        if not HAS_TLS:
            for link in itertools.chain(self.index_urls, self.find_links):
                parsed = urllib_parse.urlparse(link)
                if parsed.scheme == "https":
                    logger.warning(
                        "pip is configured with locations that require "
                        "TLS/SSL, however the ssl module in Python is not "
                        "available."
                    )
                    break

    def get_formatted_locations(self):
        # type: () -> str
        lines = []
        if self.index_urls and self.index_urls != [PyPI.simple_url]:
            lines.append(
                "Looking in indexes: {}".format(", ".join(
                    redact_password_from_url(url) for url in self.index_urls))
            )
        if self.find_links:
            lines.append(
                "Looking in links: {}".format(", ".join(self.find_links))
            )
        return "\n".join(lines)

    @staticmethod
    def _sort_locations(locations, expand_dir=False):
        # type: (Sequence[str], bool) -> Tuple[List[str], List[str]]
        """
        Sort locations into "files" (archives) and "urls", and return
        a pair of lists (files, urls).
        """
        files = []
        urls = []

        # puts the url for the given file path into the appropriate list
        def sort_path(path):
            url = path_to_url(path)
            if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
                urls.append(url)
            else:
                files.append(url)

        for url in locations:

            is_local_path = os.path.exists(url)
            is_file_url = url.startswith('file:')

            if is_local_path or is_file_url:
                if is_local_path:
                    path = url
                else:
                    path = url_to_path(url)
                if os.path.isdir(path):
                    if expand_dir:
                        path = os.path.realpath(path)
                        for item in os.listdir(path):
                            sort_path(os.path.join(path, item))
                    elif is_file_url:
                        urls.append(url)
                    else:
                        logger.warning(
                            "Path '{0}' is ignored: "
                            "it is a directory.".format(path),
                        )
                elif os.path.isfile(path):
                    sort_path(path)
                else:
                    logger.warning(
                        "Url '%s' is ignored: it is neither a file "
                        "nor a directory.", url,
                    )
            elif is_url(url):
                # Only add url with clear scheme
                urls.append(url)
            else:
                logger.warning(
                    "Url '%s' is ignored. It is either a non-existing "
                    "path or lacks a specific scheme.", url,
                )

        return files, urls

    def _candidate_sort_key(self, candidate):
        # type: (InstallationCandidate) -> CandidateSortingKey
        """
        Function used to generate link sort key for link tuples.
        The greater the return value, the more preferred it is.
        If not finding wheels, then sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self.valid_tags)
          3. source archives
        If prefer_binary was set, then all wheels are sorted above sources.
        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        """
        support_num = len(self.valid_tags)
        build_tag = tuple()  # type: BuildTag
        binary_preference = 0
        if candidate.location.is_wheel:
            # can raise InvalidWheelFilename
            wheel = Wheel(candidate.location.filename)
            if not wheel.supported(self.valid_tags):
                raise UnsupportedWheel(
                    "%s is not a supported wheel for this platform. It "
                    "can't be sorted." % wheel.filename
                )
            if self.prefer_binary:
                binary_preference = 1
            pri = -(wheel.support_index_min(self.valid_tags))
            if wheel.build_tag is not None:
                match = re.match(r'^(\d+)(.*)$', wheel.build_tag)
                build_tag_groups = match.groups()
                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
        else:  # sdist
            pri = -(support_num)
        return (binary_preference, candidate.version, build_tag, pri)
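
    # Worked example of the key above, for hypothetical candidates of one
    # project with prefer_binary=False: the version component dominates, and
    # within a version a wheel's -support_index beats an sdist's
    # -len(valid_tags), since the support index is smaller:
    #
    #     sdist 1.1  -> (0, Version('1.1'), (), -len(valid_tags))
    #     wheel 1.1  -> (0, Version('1.1'), (), -support_index)   # > sdist 1.1
    #     sdist 1.2  -> (0, Version('1.2'), (), -len(valid_tags)) # preferred
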
    def _validate_secure_origin(self, logger, location):
        # type: (Logger, Link) -> bool
        # Determine if this url used a secure transport mechanism
        parsed = urllib_parse.urlparse(str(location))
        origin = (parsed.scheme, parsed.hostname, parsed.port)

        # The protocol to use to see if the protocol matches.
        # Don't count the repository type as part of the protocol: in
        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
        # the last scheme.)
        protocol = origin[0].rsplit('+', 1)[-1]

        # Determine if our origin is a secure origin by looking through our
        # hardcoded list of secure origins, as well as any additional ones
        # configured on this PackageFinder instance.
        for secure_origin in (SECURE_ORIGINS + self.secure_origins):
            if protocol != secure_origin[0] and secure_origin[0] != "*":
                continue

            try:
                # We need to do this decode dance to ensure that we have a
                # unicode object, even on Python 2.x.
                addr = ipaddress.ip_address(
                    origin[1]
                    if (
                        isinstance(origin[1], six.text_type) or
                        origin[1] is None
                    )
                    else origin[1].decode("utf8")
                )
                network = ipaddress.ip_network(
                    secure_origin[1]
                    if isinstance(secure_origin[1], six.text_type)
                    # setting secure_origin[1] to proper Union[bytes, str]
                    # creates problems in other places
                    else secure_origin[1].decode("utf8")  # type: ignore
                )
            except ValueError:
                # We don't have both a valid address or a valid network, so
                # we'll check this origin against hostnames.
                if (origin[1] and
                        origin[1].lower() != secure_origin[1].lower() and
                        secure_origin[1] != "*"):
                    continue
            else:
                # We have a valid address and network, so see if the address
                # is contained within the network.
                if addr not in network:
                    continue

            # Check to see if the port matches
            if (origin[2] != secure_origin[2] and
                    secure_origin[2] != "*" and
                    secure_origin[2] is not None):
                continue

            # If we've gotten here, then this origin matches the current
            # secure origin and we should return True
            return True

        # If we've gotten to this point, then the origin isn't secure and we
        # will not accept it as a valid location to search. We will however
        # log a warning that we are ignoring it.
        logger.warning(
            "The repository located at %s is not a trusted or secure host and "
            "is being ignored. If this repository is available via HTTPS we "
            "recommend you use HTTPS instead, otherwise you may silence "
            "this warning and allow it anyway with '--trusted-host %s'.",
            parsed.hostname,
            parsed.hostname,
        )

        return False
    def _get_index_urls_locations(self, project_name):
        # type: (str) -> List[str]
        """Returns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        uses this url_name to produce all locations
        """

        def mkurl_pypi_url(url):
            loc = posixpath.join(
                url,
                urllib_parse.quote(canonicalize_name(project_name)))
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash. Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's
            # behavior.
            if not loc.endswith('/'):
                loc = loc + '/'
            return loc

        return [mkurl_pypi_url(url) for url in self.index_urls]
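
    # For index_urls == ['https://pypi.org/simple/'] and project_name
    # 'Django', the helper above yields 'https://pypi.org/simple/django/'
    # (name canonicalized, trailing slash added):
    #
    #     >>> import posixpath
    #     >>> from pip._vendor.packaging.utils import canonicalize_name
    #     >>> posixpath.join('https://pypi.org/simple/',
    #     ...                canonicalize_name('Django')) + '/'
    #     'https://pypi.org/simple/django/'
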
    def find_all_candidates(self, project_name):
        # type: (str) -> List[Optional[InstallationCandidate]]
        """Find all available InstallationCandidate for project_name

        This checks index_urls and find_links.
        All versions found are returned as an InstallationCandidate list.

        See _link_package_versions for details on which files are accepted
        """
        index_locations = self._get_index_urls_locations(project_name)
        index_file_loc, index_url_loc = self._sort_locations(index_locations)
        fl_file_loc, fl_url_loc = self._sort_locations(
            self.find_links, expand_dir=True,
        )

        file_locations = (Link(url) for url in itertools.chain(
            index_file_loc, fl_file_loc,
        ))

        # We trust every url that the user has given us whether it was given
        # via --index-url or --find-links.
        # We want to filter out anything which does not have a secure origin.
        url_locations = [
            link for link in itertools.chain(
                (Link(url) for url in index_url_loc),
                (Link(url) for url in fl_url_loc),
            )
            if self._validate_secure_origin(logger, link)
        ]

        logger.debug('%d location(s) to search for versions of %s:',
                     len(url_locations), project_name)

        for location in url_locations:
            logger.debug('* %s', location)

        canonical_name = canonicalize_name(project_name)
        formats = self.format_control.get_allowed_formats(canonical_name)
        search = Search(project_name, canonical_name, formats)
        find_links_versions = self._package_versions(
            # We trust every directly linked archive in find_links
            (Link(url, '-f') for url in self.find_links),
            search
        )

        page_versions = []
        for page in self._get_pages(url_locations, project_name):
            logger.debug('Analyzing links from page %s', page.url)
            with indent_log():
                page_versions.extend(
                    self._package_versions(page.iter_links(), search)
                )

        file_versions = self._package_versions(file_locations, search)
        if file_versions:
            file_versions.sort(reverse=True)
            logger.debug(
                'Local files found: %s',
                ', '.join([
                    url_to_path(candidate.location.url)
                    for candidate in file_versions
                ])
            )

        # This is an intentional priority ordering
        return file_versions + find_links_versions + page_versions
    def find_requirement(self, req, upgrade):
        # type: (InstallRequirement, bool) -> Optional[Link]
        """Try to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a Link if found,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        """
        all_candidates = self.find_all_candidates(req.name)

        # Filter out anything which doesn't match our specifier
        compatible_versions = set(
            req.specifier.filter(
                # We turn the version object into a str here because otherwise
                # when we're debundled but setuptools isn't, Python will see
                # packaging.version.Version and
                # pkg_resources._vendor.packaging.version.Version as different
                # types. This way we'll use a str as a common data interchange
                # format. If we stop using the pkg_resources provided specifier
                # and start using our own, we can drop the cast to str().
                [str(c.version) for c in all_candidates],
                prereleases=(
                    self.allow_all_prereleases
                    if self.allow_all_prereleases else None
                ),
            )
        )
        applicable_candidates = [
            # Again, converting to str to deal with debundling.
            c for c in all_candidates if str(c.version) in compatible_versions
        ]

        if applicable_candidates:
            best_candidate = max(applicable_candidates,
                                 key=self._candidate_sort_key)
        else:
            best_candidate = None

        if req.satisfied_by is not None:
            installed_version = parse_version(req.satisfied_by.version)
        else:
            installed_version = None

        if installed_version is None and best_candidate is None:
            logger.critical(
                'Could not find a version that satisfies the requirement %s '
                '(from versions: %s)',
                req,
                ', '.join(
                    sorted(
                        {str(c.version) for c in all_candidates},
                        key=parse_version,
                    )
                )
            )

            raise DistributionNotFound(
                'No matching distribution found for %s' % req
            )

        best_installed = False
        if installed_version and (
                best_candidate is None or
                best_candidate.version <= installed_version):
            best_installed = True

        if not upgrade and installed_version is not None:
            if best_installed:
                logger.debug(
                    'Existing installed version (%s) is most up-to-date and '
                    'satisfies requirement',
                    installed_version,
                )
            else:
                logger.debug(
                    'Existing installed version (%s) satisfies requirement '
                    '(most up-to-date version is %s)',
                    installed_version,
                    best_candidate.version,
                )
            return None

        if best_installed:
            # We have an existing version, and it's the best version
            logger.debug(
                'Installed version (%s) is most up-to-date (past versions: '
                '%s)',
                installed_version,
                ', '.join(sorted(compatible_versions, key=parse_version)) or
                "none",
            )
            raise BestVersionAlreadyInstalled

        logger.debug(
            'Using version %s (newest of versions: %s)',
            best_candidate.version,
            ', '.join(sorted(compatible_versions, key=parse_version))
        )
        return best_candidate.location
def _get_pages(self, locations, project_name):
|
|
||||||
# type: (Iterable[Link], str) -> Iterable[HTMLPage]
|
|
||||||
"""
|
|
||||||
Yields (page, page_url) from the given locations, skipping
|
|
||||||
locations that have errors.
|
|
||||||
"""
|
|
||||||
seen = set() # type: Set[Link]
|
|
||||||
for location in locations:
|
|
||||||
if location in seen:
|
|
||||||
continue
|
|
||||||
seen.add(location)
|
|
||||||
|
|
||||||
page = _get_html_page(location, session=self.session)
|
|
||||||
if page is None:
|
|
||||||
continue
|
|
||||||
|
|
||||||
yield page
|
|
||||||
|
|
||||||
_py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
|
|
||||||
|
|
||||||
def _sort_links(self, links):
|
|
||||||
# type: (Iterable[Link]) -> List[Link]
|
|
||||||
"""
|
|
||||||
Returns elements of links in order, non-egg links first, egg links
|
|
||||||
second, while eliminating duplicates
|
|
||||||
"""
|
|
||||||
eggs, no_eggs = [], []
|
|
||||||
seen = set() # type: Set[Link]
|
|
||||||
for link in links:
|
|
||||||
if link not in seen:
|
|
||||||
seen.add(link)
|
|
||||||
if link.egg_fragment:
|
|
||||||
eggs.append(link)
|
|
||||||
else:
|
|
||||||
no_eggs.append(link)
|
|
||||||
return no_eggs + eggs
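
    # A doctest-style sketch of the ordering (the `finder` instance and the
    # links are hypothetical, not part of this module): egg links are
    # deprioritized, relative order is otherwise kept, duplicates dropped.
    # >>> plain = Link('https://example.com/pkg-1.0.tar.gz')
    # >>> egg = Link('https://example.com/pkg.tar.gz#egg=pkg')
    # >>> finder._sort_links([egg, plain, egg])
    # [<Link https://example.com/pkg-1.0.tar.gz>,
    #  <Link https://example.com/pkg.tar.gz#egg=pkg>]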

    def _package_versions(
        self,
        links,  # type: Iterable[Link]
        search  # type: Search
    ):
        # type: (...) -> List[Optional[InstallationCandidate]]
        result = []
        for link in self._sort_links(links):
            v = self._link_package_versions(link, search)
            if v is not None:
                result.append(v)
        return result

    def _log_skipped_link(self, link, reason):
        # type: (Link, str) -> None
        if link not in self.logged_links:
            logger.debug('Skipping link %s; %s', link, reason)
            self.logged_links.add(link)

    def _link_package_versions(self, link, search):
        # type: (Link, Search) -> Optional[InstallationCandidate]
        """Return an InstallationCandidate or None"""
        version = None
        if link.egg_fragment:
            egg_info = link.egg_fragment
            ext = link.ext
        else:
            egg_info, ext = link.splitext()
            if not ext:
                self._log_skipped_link(link, 'not a file')
                return None
            if ext not in SUPPORTED_EXTENSIONS:
                self._log_skipped_link(
                    link, 'unsupported archive format: %s' % ext,
                )
                return None
            if "binary" not in search.formats and ext == WHEEL_EXTENSION:
                self._log_skipped_link(
                    link, 'No binaries permitted for %s' % search.supplied,
                )
                return None
            if "macosx10" in link.path and ext == '.zip':
                self._log_skipped_link(link, 'macosx10 one')
                return None
            if ext == WHEEL_EXTENSION:
                try:
                    wheel = Wheel(link.filename)
                except InvalidWheelFilename:
                    self._log_skipped_link(link, 'invalid wheel filename')
                    return None
                if canonicalize_name(wheel.name) != search.canonical:
                    self._log_skipped_link(
                        link, 'wrong project name (not %s)' % search.supplied)
                    return None

                if not wheel.supported(self.valid_tags):
                    self._log_skipped_link(
                        link, 'it is not compatible with this Python')
                    return None

                version = wheel.version

        # This should be up by the search.ok_binary check, but see issue 2700.
        if "source" not in search.formats and ext != WHEEL_EXTENSION:
            self._log_skipped_link(
                link, 'No sources permitted for %s' % search.supplied,
            )
            return None

        if not version:
            version = _egg_info_matches(egg_info, search.canonical)
        if not version:
            self._log_skipped_link(
                link, 'Missing project version for %s' % search.supplied)
            return None

        match = self._py_version_re.search(version)
        if match:
            version = version[:match.start()]
            py_version = match.group(1)
            if py_version != sys.version[:3]:
                self._log_skipped_link(
                    link, 'Python version is incorrect')
                return None
        try:
            support_this_python = check_requires_python(link.requires_python)
        except specifiers.InvalidSpecifier:
            logger.debug("Package %s has an invalid Requires-Python entry: %s",
                         link.filename, link.requires_python)
            support_this_python = True

        if not support_this_python:
            logger.debug("The package %s is incompatible with the python "
                         "version in use. Acceptable python versions are: %s",
                         link, link.requires_python)
            return None
        logger.debug('Found link %s, version: %s', link, version)

        return InstallationCandidate(search.supplied, version, link)


def _find_name_version_sep(egg_info, canonical_name):
    # type: (str, str) -> int
    """Find the separator's index based on the package's canonical name.

    `egg_info` must be an egg info string for the given package, and
    `canonical_name` must be the package's canonical name.

    This function is needed since the canonicalized name does not necessarily
    have the same length as the egg info's name part. An example::

    >>> egg_info = 'foo__bar-1.0'
    >>> canonical_name = 'foo-bar'
    >>> _find_name_version_sep(egg_info, canonical_name)
    8
    """
    # Project name and version must be separated by one single dash. Find all
    # occurrences of dashes; if the string in front of it matches the canonical
    # name, this is the one separating the name and version parts.
    for i, c in enumerate(egg_info):
        if c != "-":
            continue
        if canonicalize_name(egg_info[:i]) == canonical_name:
            return i
    raise ValueError("{} does not match {}".format(egg_info, canonical_name))


def _egg_info_matches(egg_info, canonical_name):
    # type: (str, str) -> Optional[str]
    """Pull the version part out of a string.

    :param egg_info: The string to parse. E.g. foo-2.1
    :param canonical_name: The canonicalized name of the package this
        belongs to.
    """
    try:
        version_start = _find_name_version_sep(egg_info, canonical_name) + 1
    except ValueError:
        return None
    version = egg_info[version_start:]
    if not version:
        return None
    return version
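
# A doctest-style sketch of _egg_info_matches (hypothetical inputs):
# >>> _egg_info_matches('pip-18.0', 'pip')
# '18.0'
# >>> _egg_info_matches('foo__bar-1.0', 'foo-bar')
# '1.0'
# >>> _egg_info_matches('unrelated-1.0', 'pip') is None
# True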


def _determine_base_url(document, page_url):
    """Determine the HTML document's base URL.

    This looks for a ``<base>`` tag in the HTML document. If present, its href
    attribute denotes the base URL of anchor tags in the document. If there is
    no such tag (or if it does not have a valid href attribute), the HTML
    file's URL is used as the base URL.

    :param document: An HTML document representation. The current
        implementation expects the result of ``html5lib.parse()``.
    :param page_url: The URL of the HTML document.
    """
    for base in document.findall(".//base"):
        href = base.get("href")
        if href is not None:
            return href
    return page_url
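
# A minimal sketch of both branches (hypothetical URLs and document):
# >>> import html5lib
# >>> doc = html5lib.parse(
# ...     b'<html><head><base href="https://mirror.example/simple/">'
# ...     b'</head></html>', namespaceHTMLElements=False)
# >>> _determine_base_url(doc, 'https://pypi.org/simple/')
# 'https://mirror.example/simple/'
# >>> doc = html5lib.parse(b'<html></html>', namespaceHTMLElements=False)
# >>> _determine_base_url(doc, 'https://pypi.org/simple/')
# 'https://pypi.org/simple/'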


def _get_encoding_from_headers(headers):
    """Determine if we have any encoding information in our headers.
    """
    if headers and "Content-Type" in headers:
        content_type, params = cgi.parse_header(headers["Content-Type"])
        if "charset" in params:
            return params['charset']
    return None
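
# Sketch (hypothetical headers):
# >>> _get_encoding_from_headers({'Content-Type': 'text/html; charset=utf-8'})
# 'utf-8'
# >>> _get_encoding_from_headers({'Content-Type': 'text/html'}) is None
# True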


_CLEAN_LINK_RE = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)


def _clean_link(url):
    # type: (str) -> str
    """Makes sure a link is fully encoded. That is, if a ' ' shows up in
    the link, it will be rewritten to %20 (while not over-quoting
    % or other characters)."""
    return _CLEAN_LINK_RE.sub(lambda match: '%%%2x' % ord(match.group(0)), url)
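
# Sketch (hypothetical URL): only characters outside the allowed set are
# percent-encoded; existing '%' escapes are left alone.
# >>> _clean_link('https://example.com/some pkg-1.0.tar.gz')
# 'https://example.com/some%20pkg-1.0.tar.gz'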


class HTMLPage(object):
    """Represents one page, along with its URL"""

    def __init__(self, content, url, headers=None):
        # type: (bytes, str, MutableMapping[str, str]) -> None
        self.content = content
        self.url = url
        self.headers = headers

    def __str__(self):
        return redact_password_from_url(self.url)

    def iter_links(self):
        # type: () -> Iterable[Link]
        """Yields all links in the page"""
        document = html5lib.parse(
            self.content,
            transport_encoding=_get_encoding_from_headers(self.headers),
            namespaceHTMLElements=False,
        )
        base_url = _determine_base_url(document, self.url)
        for anchor in document.findall(".//a"):
            if anchor.get("href"):
                href = anchor.get("href")
                url = _clean_link(urllib_parse.urljoin(base_url, href))
                pyrequire = anchor.get('data-requires-python')
                pyrequire = unescape(pyrequire) if pyrequire else None
                yield Link(url, self.url, requires_python=pyrequire)


Search = namedtuple('Search', 'supplied canonical formats')
"""Capture key aspects of a search.

:attribute supplied: The user supplied package.
:attribute canonical: The canonical package name.
:attribute formats: The formats allowed for this package. Should be a set
    with 'binary' or 'source' or both in it.
"""
@@ -1,211 +0,0 @@
"""Locations where we look for configs, install stuff, etc"""
from __future__ import absolute_import

import os
import os.path
import platform
import site
import sys
import sysconfig
from distutils import sysconfig as distutils_sysconfig
from distutils.command.install import SCHEME_KEYS  # type: ignore

from pip._internal.utils import appdirs
from pip._internal.utils.compat import WINDOWS, expanduser
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Any, Union, Dict, List, Optional  # noqa: F401


# Application Directories
USER_CACHE_DIR = appdirs.user_cache_dir("pip")


DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
here by pip.

Once this package is successfully installed this source code will be
deleted (unless you remove this file).
'''
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'


def write_delete_marker_file(directory):
    # type: (str) -> None
    """
    Write the pip delete marker file into this directory.
    """
    filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
    with open(filepath, 'w') as marker_fp:
        marker_fp.write(DELETE_MARKER_MESSAGE)


def running_under_virtualenv():
    # type: () -> bool
    """
    Return True if we're running inside a virtualenv, False otherwise.

    """
    if hasattr(sys, 'real_prefix'):
        return True
    elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
        return True

    return False


def virtualenv_no_global():
    # type: () -> bool
    """
    Return True if in a venv and no system site packages.
    """
    # this mirrors the logic in virtualenv.py for locating the
    # no-global-site-packages.txt file
    site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
    no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
    if running_under_virtualenv() and os.path.isfile(no_global_file):
        return True
    else:
        return False


if running_under_virtualenv():
    src_prefix = os.path.join(sys.prefix, 'src')
else:
    # FIXME: keep src in cwd for now (it is not a temporary folder)
    try:
        src_prefix = os.path.join(os.getcwd(), 'src')
    except OSError:
        # In case the current working directory has been renamed or deleted
        sys.exit(
            "The folder you are executing pip from can no longer be found."
        )

# under macOS + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
# Note: using realpath due to tmp dirs on OSX being symlinks
src_prefix = os.path.abspath(src_prefix)

# FIXME doesn't account for venv linked to global site-packages

site_packages = sysconfig.get_path("purelib")  # type: Optional[str]

# This is because of a bug in PyPy's sysconfig module, see
# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
# for more information.
if platform.python_implementation().lower() == "pypy":
    site_packages = distutils_sysconfig.get_python_lib()
try:
    # Use getusersitepackages if this is present, as it ensures that the
    # value is initialised properly.
    user_site = site.getusersitepackages()
except AttributeError:
    user_site = site.USER_SITE
user_dir = expanduser('~')
if WINDOWS:
    bin_py = os.path.join(sys.prefix, 'Scripts')
    bin_user = os.path.join(user_site, 'Scripts')
    # buildout uses 'bin' on Windows too?
    if not os.path.exists(bin_py):
        bin_py = os.path.join(sys.prefix, 'bin')
        bin_user = os.path.join(user_site, 'bin')

    config_basename = 'pip.ini'

    legacy_storage_dir = os.path.join(user_dir, 'pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )
else:
    bin_py = os.path.join(sys.prefix, 'bin')
    bin_user = os.path.join(user_site, 'bin')

    config_basename = 'pip.conf'

    legacy_storage_dir = os.path.join(user_dir, '.pip')
    legacy_config_file = os.path.join(
        legacy_storage_dir,
        config_basename,
    )
    # Forcing to use /usr/local/bin for standard macOS framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
        bin_py = '/usr/local/bin'

site_config_files = [
    os.path.join(path, config_basename)
    for path in appdirs.site_config_dirs('pip')
]

venv_config_file = os.path.join(sys.prefix, config_basename)
new_config_file = os.path.join(appdirs.user_config_dir("pip"), config_basename)


def distutils_scheme(dist_name, user=False, home=None, root=None,
                     isolated=False, prefix=None):
    # type:(str, bool, str, str, bool, str) -> dict
    """
    Return a distutils install scheme
    """
    from distutils.dist import Distribution

    scheme = {}

    if isolated:
        extra_dist_args = {"script_args": ["--no-user-cfg"]}
    else:
        extra_dist_args = {}
    dist_args = {'name': dist_name}  # type: Dict[str, Union[str, List[str]]]
    dist_args.update(extra_dist_args)

    d = Distribution(dist_args)
    # Ignoring, typeshed issue reported python/typeshed/issues/2567
    d.parse_config_files()
    # NOTE: Ignoring type since mypy can't find attributes on 'Command'
    i = d.get_command_obj('install', create=True)  # type: Any
    assert i is not None
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), "user={} prefix={}".format(user, prefix)
    i.user = user or i.user
    if user:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, 'install_' + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib). Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config

    # Ignoring, typeshed issue reported python/typeshed/issues/2567
    if 'install_lib' in d.get_option_dict('install'):  # type: ignore
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        scheme['headers'] = os.path.join(
            sys.prefix,
            'include',
            'site',
            'python' + sys.version[:3],
            dist_name,
        )

        if root is not None:
            path_no_drive = os.path.splitdrive(
                os.path.abspath(scheme["headers"]))[1]
            scheme["headers"] = os.path.join(
                root,
                path_no_drive[1:],
            )

    return scheme
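
# Minimal usage sketch (the resulting paths depend on the interpreter and
# platform, so only the shape of the result is shown):
# >>> scheme = distutils_scheme('example')
# >>> sorted(scheme) == sorted(SCHEME_KEYS)
# True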
@@ -1,2 +0,0 @@
"""A package that contains models that represent entities.
"""
@@ -1,31 +0,0 @@
from pip._vendor.packaging.version import parse as parse_version

from pip._internal.utils.models import KeyBasedCompareMixin
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from pip._vendor.packaging.version import _BaseVersion  # noqa: F401
    from pip._internal.models.link import Link  # noqa: F401
    from typing import Any, Union  # noqa: F401


class InstallationCandidate(KeyBasedCompareMixin):
    """Represents a potential "candidate" for installation.
    """

    def __init__(self, project, version, location):
        # type: (Any, str, Link) -> None
        self.project = project
        self.version = parse_version(version)  # type: _BaseVersion
        self.location = location

        super(InstallationCandidate, self).__init__(
            key=(self.project, self.version, self.location),
            defining_class=InstallationCandidate
        )

    def __repr__(self):
        # type: () -> str
        return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
            self.project, self.version, self.location,
        )
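
# A doctest-style sketch (hypothetical links): because `version` is parsed,
# candidates order numerically rather than lexicographically.
# >>> old = InstallationCandidate('foo', '1.9', link_a)
# >>> new = InstallationCandidate('foo', '1.10', link_b)
# >>> new > old
# True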
@@ -1,73 +0,0 @@
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional, Set, FrozenSet  # noqa: F401


class FormatControl(object):
    """Helper for managing formats from which a package can be installed.
    """

    def __init__(self, no_binary=None, only_binary=None):
        # type: (Optional[Set], Optional[Set]) -> None
        if no_binary is None:
            no_binary = set()
        if only_binary is None:
            only_binary = set()

        self.no_binary = no_binary
        self.only_binary = only_binary

    def __eq__(self, other):
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return "{}({}, {})".format(
            self.__class__.__name__,
            self.no_binary,
            self.only_binary
        )

    @staticmethod
    def handle_mutual_excludes(value, target, other):
        # type: (str, Optional[Set], Optional[Set]) -> None
        new = value.split(',')
        while ':all:' in new:
            other.clear()
            target.clear()
            target.add(':all:')
            del new[:new.index(':all:') + 1]
            # Without a :none:, we want to discard everything as :all: covers it
            if ':none:' not in new:
                return
        for name in new:
            if name == ':none:':
                target.clear()
                continue
            name = canonicalize_name(name)
            other.discard(name)
            target.add(name)

    def get_allowed_formats(self, canonical_name):
        # type: (str) -> FrozenSet
        result = {"binary", "source"}
        if canonical_name in self.only_binary:
            result.discard('source')
        elif canonical_name in self.no_binary:
            result.discard('binary')
        elif ':all:' in self.only_binary:
            result.discard('source')
        elif ':all:' in self.no_binary:
            result.discard('binary')
        return frozenset(result)
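
    # Sketch of the precedence rules (hypothetical package names): an exact
    # name match beats a ':all:' wildcard on the other side.
    # >>> fc = FormatControl(no_binary={'pkg'}, only_binary={':all:'})
    # >>> fc.get_allowed_formats('pkg') == {'source'}
    # True
    # >>> fc.get_allowed_formats('other') == {'binary'}
    # True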

    def disallow_binaries(self):
        # type: () -> None
        self.handle_mutual_excludes(
            ':all:', self.no_binary, self.only_binary,
        )
@@ -1,31 +0,0 @@
from pip._vendor.six.moves.urllib import parse as urllib_parse


class PackageIndex(object):
    """Represents a Package Index and provides easier access to endpoints
    """

    def __init__(self, url, file_storage_domain):
        # type: (str, str) -> None
        super(PackageIndex, self).__init__()
        self.url = url
        self.netloc = urllib_parse.urlsplit(url).netloc
        self.simple_url = self._url_for_path('simple')
        self.pypi_url = self._url_for_path('pypi')

        # This is part of a temporary hack used to block installs of PyPI
        # packages which depend on external urls, only necessary until PyPI
        # can block such packages themselves
        self.file_storage_domain = file_storage_domain

    def _url_for_path(self, path):
        # type: (str) -> str
        return urllib_parse.urljoin(self.url, path)


PyPI = PackageIndex(
    'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
)
TestPyPI = PackageIndex(
    'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
)
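
# Sketch of the derived endpoints (these follow from urljoin on the URLs
# above):
# >>> PyPI.netloc
# 'pypi.org'
# >>> PyPI.simple_url
# 'https://pypi.org/simple'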
@@ -1,163 +0,0 @@
import posixpath
import re

from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.download import path_to_url
from pip._internal.utils.misc import (
    WHEEL_EXTENSION, redact_password_from_url, splitext,
)
from pip._internal.utils.models import KeyBasedCompareMixin
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional, Tuple, Union, Text  # noqa: F401
    from pip._internal.index import HTMLPage  # noqa: F401


class Link(KeyBasedCompareMixin):
    """Represents a parsed link from a Package Index's simple URL
    """

    def __init__(self, url, comes_from=None, requires_python=None):
        # type: (str, Optional[Union[str, HTMLPage]], Optional[str]) -> None
        """
        url:
            url of the resource pointed to (href of the link)
        comes_from:
            instance of HTMLPage where the link was found, or string.
        requires_python:
            String containing the `Requires-Python` metadata field, specified
            in PEP 345. This may be specified by a data-requires-python
            attribute in the HTML link tag, as described in PEP 503.
        """

        # url can be a UNC windows share
        if url.startswith('\\\\'):
            url = path_to_url(url)

        self.url = url
        self.comes_from = comes_from
        self.requires_python = requires_python if requires_python else None

        super(Link, self).__init__(
            key=(self.url),
            defining_class=Link
        )

    def __str__(self):
        if self.requires_python:
            rp = ' (requires-python:%s)' % self.requires_python
        else:
            rp = ''
        if self.comes_from:
            return '%s (from %s)%s' % (redact_password_from_url(self.url),
                                       self.comes_from, rp)
        else:
            return redact_password_from_url(str(self.url))

    def __repr__(self):
        return '<Link %s>' % self

    @property
    def filename(self):
        # type: () -> str
        _, netloc, path, _, _ = urllib_parse.urlsplit(self.url)
        name = posixpath.basename(path.rstrip('/')) or netloc
        name = urllib_parse.unquote(name)
        assert name, ('URL %r produced no filename' % self.url)
        return name

    @property
    def scheme(self):
        # type: () -> str
        return urllib_parse.urlsplit(self.url)[0]

    @property
    def netloc(self):
        # type: () -> str
        return urllib_parse.urlsplit(self.url)[1]

    @property
    def path(self):
        # type: () -> str
        return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])

    def splitext(self):
        # type: () -> Tuple[str, str]
        return splitext(posixpath.basename(self.path.rstrip('/')))

    @property
    def ext(self):
        # type: () -> str
        return self.splitext()[1]

    @property
    def url_without_fragment(self):
        # type: () -> str
        scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)
        return urllib_parse.urlunsplit((scheme, netloc, path, query, None))

    _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')

    @property
    def egg_fragment(self):
        # type: () -> Optional[str]
        match = self._egg_fragment_re.search(self.url)
        if not match:
            return None
        return match.group(1)
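
    # Sketch (hypothetical URL): the fragment carries pip-specific metadata.
    # >>> link = Link('https://example.com/pkg-1.0.tar.gz#egg=pkg')
    # >>> link.filename
    # 'pkg-1.0.tar.gz'
    # >>> link.ext
    # '.tar.gz'
    # >>> link.egg_fragment
    # 'pkg'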

    _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')

    @property
    def subdirectory_fragment(self):
        # type: () -> Optional[str]
        match = self._subdirectory_fragment_re.search(self.url)
        if not match:
            return None
        return match.group(1)

    _hash_re = re.compile(
        r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
    )

    @property
    def hash(self):
        # type: () -> Optional[str]
        match = self._hash_re.search(self.url)
        if match:
            return match.group(2)
        return None

    @property
    def hash_name(self):
        # type: () -> Optional[str]
        match = self._hash_re.search(self.url)
        if match:
            return match.group(1)
        return None

    @property
    def show_url(self):
        # type: () -> Optional[str]
        return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])

    @property
    def is_wheel(self):
        # type: () -> bool
        return self.ext == WHEEL_EXTENSION

    @property
    def is_artifact(self):
        # type: () -> bool
        """
        Determines if this points to an actual artifact (e.g. a tarball) or if
        it points to an "abstract" thing like a path or a VCS location.
        """
        from pip._internal.vcs import vcs

        if self.scheme in vcs.all_schemes:
            return False

        return True
@@ -1,155 +0,0 @@
"""Validation of dependencies of packages
"""

import logging
from collections import namedtuple

from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError

from pip._internal.operations.prepare import make_abstract_dist
from pip._internal.utils.misc import get_installed_distributions
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

logger = logging.getLogger(__name__)

if MYPY_CHECK_RUNNING:
    from pip._internal.req.req_install import InstallRequirement  # noqa: F401
    from typing import (  # noqa: F401
        Any, Callable, Dict, Optional, Set, Tuple, List
    )

    # Shorthands
    PackageSet = Dict[str, 'PackageDetails']
    Missing = Tuple[str, Any]
    Conflicting = Tuple[str, str, Any]

    MissingDict = Dict[str, List[Missing]]
    ConflictingDict = Dict[str, List[Conflicting]]
    CheckResult = Tuple[MissingDict, ConflictingDict]

PackageDetails = namedtuple('PackageDetails', ['version', 'requires'])


def create_package_set_from_installed(**kwargs):
    # type: (**Any) -> Tuple[PackageSet, bool]
    """Converts a list of distributions into a PackageSet.
    """
    # Default to using all packages installed on the system
    if kwargs == {}:
        kwargs = {"local_only": False, "skip": ()}

    package_set = {}
    problems = False
    for dist in get_installed_distributions(**kwargs):
        name = canonicalize_name(dist.project_name)
        try:
            package_set[name] = PackageDetails(dist.version, dist.requires())
        except RequirementParseError as e:
            # Don't crash on broken metadata
            logger.warning("Error parsing requirements for %s: %s", name, e)
            problems = True
    return package_set, problems


def check_package_set(package_set, should_ignore=None):
    # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
    """Check if a package set is consistent

    If should_ignore is passed, it should be a callable that takes a
    package name and returns a boolean.
    """
    if should_ignore is None:
        def should_ignore(name):
            return False

    missing = dict()
    conflicting = dict()

    for package_name in package_set:
        # Info about dependencies of package_name
        missing_deps = set()  # type: Set[Missing]
        conflicting_deps = set()  # type: Set[Conflicting]

        if should_ignore(package_name):
            continue

        for req in package_set[package_name].requires:
            name = canonicalize_name(req.project_name)  # type: str

            # Check if it's missing
            if name not in package_set:
                missed = True
                if req.marker is not None:
                    missed = req.marker.evaluate()
                if missed:
                    missing_deps.add((name, req))
                continue

            # Check if there's a conflict
            version = package_set[name].version  # type: str
            if not req.specifier.contains(version, prereleases=True):
                conflicting_deps.add((name, version, req))

        if missing_deps:
            missing[package_name] = sorted(missing_deps, key=str)
        if conflicting_deps:
            conflicting[package_name] = sorted(conflicting_deps, key=str)

    return missing, conflicting
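
# Minimal usage sketch (results depend on the active environment; only the
# shapes are shown):
# >>> package_set, problems = create_package_set_from_installed()
# >>> missing, conflicting = check_package_set(package_set)
# `missing` maps a package name to [(dep_name, requirement), ...];
# `conflicting` maps it to [(dep_name, installed_version, requirement), ...].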


def check_install_conflicts(to_install):
    # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
    """For checking if the dependency graph would be consistent after \
    installing given requirements
    """
    # Start from the current state
    package_set, _ = create_package_set_from_installed()
    # Install packages
    would_be_installed = _simulate_installation_of(to_install, package_set)

    # Only warn about directly-dependent packages; create a whitelist of them
    whitelist = _create_whitelist(would_be_installed, package_set)

    return (
        package_set,
        check_package_set(
            package_set, should_ignore=lambda name: name not in whitelist
        )
    )


def _simulate_installation_of(to_install, package_set):
    # type: (List[InstallRequirement], PackageSet) -> Set[str]
    """Computes the version of packages after installing to_install.
    """

    # Keep track of packages that were installed
    installed = set()

    # Modify it as installing requirement_set would (assuming no errors)
    for inst_req in to_install:
        dist = make_abstract_dist(inst_req).dist()
        name = canonicalize_name(dist.key)
        package_set[name] = PackageDetails(dist.version, dist.requires())

        installed.add(name)

    return installed


def _create_whitelist(would_be_installed, package_set):
    # type: (Set[str], PackageSet) -> Set[str]
    packages_affected = set(would_be_installed)

    for package_name in package_set:
        if package_name in packages_affected:
            continue

        for req in package_set[package_name].requires:
            if canonicalize_name(req.name) in packages_affected:
                packages_affected.add(package_name)
                break

    return packages_affected
@@ -1,247 +0,0 @@
from __future__ import absolute_import

import collections
import logging
import os
import re

from pip._vendor import six
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError

from pip._internal.exceptions import BadCommand, InstallationError
from pip._internal.req.constructors import (
    install_req_from_editable, install_req_from_line,
)
from pip._internal.req.req_file import COMMENT_RE
from pip._internal.utils.misc import (
    dist_is_editable, get_installed_distributions,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (  # noqa: F401
        Iterator, Optional, List, Container, Set, Dict, Tuple, Iterable, Union
    )
    from pip._internal.cache import WheelCache  # noqa: F401
    from pip._vendor.pkg_resources import (  # noqa: F401
        Distribution, Requirement
    )

    RequirementInfo = Tuple[Optional[Union[str, Requirement]], bool, List[str]]


logger = logging.getLogger(__name__)


def freeze(
    requirement=None,  # type: Optional[List[str]]
    find_links=None,  # type: Optional[List[str]]
    local_only=None,  # type: Optional[bool]
    user_only=None,  # type: Optional[bool]
    skip_regex=None,  # type: Optional[str]
    isolated=False,  # type: bool
    wheel_cache=None,  # type: Optional[WheelCache]
    exclude_editable=False,  # type: bool
    skip=()  # type: Container[str]
):
    # type: (...) -> Iterator[str]
    find_links = find_links or []
    skip_match = None

    if skip_regex:
        skip_match = re.compile(skip_regex).search

    for link in find_links:
        yield '-f %s' % link
    installations = {}  # type: Dict[str, FrozenRequirement]
    for dist in get_installed_distributions(local_only=local_only,
                                            skip=(),
                                            user_only=user_only):
        try:
            req = FrozenRequirement.from_dist(dist)
        except RequirementParseError:
            logger.warning(
                "Could not parse requirement: %s",
                dist.project_name
            )
            continue
        if exclude_editable and req.editable:
            continue
        installations[req.name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options = set()  # type: Set[str]
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files = collections.defaultdict(list)  # type: Dict[str, List[str]]
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (not line.strip() or
                            line.strip().startswith('#') or
                            (skip_match and skip_match(line)) or
                            line.startswith((
                                '-r', '--requirement',
                                '-Z', '--always-unzip',
                                '-f', '--find-links',
                                '-i', '--index-url',
                                '--pre',
                                '--trusted-host',
                                '--process-dependency-links',
                                '--extra-index-url'))):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    if line.startswith('-e') or line.startswith('--editable'):
                        if line.startswith('-e'):
                            line = line[2:].strip()
                        else:
                            line = line[len('--editable'):].strip().lstrip('=')
                        line_req = install_req_from_editable(
                            line,
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )
                    else:
                        line_req = install_req_from_line(
                            COMMENT_RE.sub('', line).strip(),
                            isolated=isolated,
                            wheel_cache=wheel_cache,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path, line.strip(),
                        )
                        logger.info(
                            " (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    elif line_req.name not in installations:
                        # either it's not installed, or it is installed
                        # but has been processed already
                        if not req_files[line_req.name]:
                            logger.warning(
                                "Requirement file [%s] contains %s, but "
                                "package %r is not installed",
                                req_file_path,
                                COMMENT_RE.sub('', line).strip(), line_req.name
                            )
                        else:
                            req_files[line_req.name].append(req_file_path)
                    else:
                        yield str(installations[line_req.name]).rstrip()
                        del installations[line_req.name]
                        req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in six.iteritems(req_files):
            if len(files) > 1:
                logger.warning("Requirement %s included multiple times [%s]",
                               name, ', '.join(sorted(set(files))))

        yield (
            '## The following requirements were added by '
            'pip freeze:'
        )
    for installation in sorted(
            installations.values(), key=lambda x: x.name.lower()):
        if canonicalize_name(installation.name) not in skip:
            yield str(installation).rstrip()
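
# Minimal usage sketch (output depends on the active environment):
# >>> for line in freeze(local_only=True):
# ...     print(line)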


def get_requirement_info(dist):
    # type: (Distribution) -> RequirementInfo
    """
    Compute and return values (req, editable, comments) for use in
    FrozenRequirement.from_dist().
    """
    if not dist_is_editable(dist):
        return (None, False, [])

    location = os.path.normcase(os.path.abspath(dist.location))

    from pip._internal.vcs import vcs, RemoteNotFoundError
    vc_type = vcs.get_backend_type(location)

    if not vc_type:
        req = dist.as_requirement()
        logger.debug(
            'No VCS found for editable requirement %r in: %r', req,
            location,
        )
        comments = [
            '# Editable install with no version control ({})'.format(req)
        ]
        return (location, True, comments)

    try:
        req = vc_type.get_src_requirement(location, dist.project_name)
    except RemoteNotFoundError:
        req = dist.as_requirement()
        comments = [
            '# Editable {} install with no remote ({})'.format(
                vc_type.__name__, req,
            )
        ]
        return (location, True, comments)

    except BadCommand:
        logger.warning(
            'cannot determine version of editable source in %s '
            '(%s command not found in path)',
            location,
            vc_type.name,
        )
        return (None, True, [])

    except InstallationError as exc:
        logger.warning(
            "Error when trying to get requirement for VCS system %s, "
            "falling back to uneditable format", exc
        )
    else:
        if req is not None:
            return (req, True, [])

    logger.warning(
        'Could not determine repository location of %s', location
    )
    comments = ['## !! Could not determine repository location']

    return (None, False, comments)


class FrozenRequirement(object):
    def __init__(self, name, req, editable, comments=()):
        # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments

    @classmethod
    def from_dist(cls, dist):
        # type: (Distribution) -> FrozenRequirement
        req, editable, comments = get_requirement_info(dist)
        if req is None:
            req = dist.as_requirement()

        return cls(dist.project_name, req, editable, comments=comments)

    def __str__(self):
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'
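
# Sketch of the emitted formats (hypothetical requirements):
# >>> str(FrozenRequirement('foo', 'foo==1.0', editable=False))
# 'foo==1.0\n'
# >>> str(FrozenRequirement('bar', 'git+https://example.com/bar#egg=bar', True))
# '-e git+https://example.com/bar#egg=bar\n'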
@@ -1,413 +0,0 @@
"""Prepares a distribution for installation
"""

import logging
import os

from pip._vendor import pkg_resources, requests

from pip._internal.build_env import BuildEnvironment
from pip._internal.download import (
    is_dir_url, is_file_url, is_vcs_url, unpack_url, url_to_path,
)
from pip._internal.exceptions import (
    DirectoryUrlHashUnsupported, HashUnpinned, InstallationError,
    PreviousBuildDirError, VcsHashUnsupported,
)
from pip._internal.utils.compat import expanduser
from pip._internal.utils.hashes import MissingHashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import display_path, normalize_path
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.vcs import vcs

if MYPY_CHECK_RUNNING:
    from typing import Any, Optional  # noqa: F401
    from pip._internal.req.req_install import InstallRequirement  # noqa: F401
    from pip._internal.index import PackageFinder  # noqa: F401
    from pip._internal.download import PipSession  # noqa: F401
    from pip._internal.req.req_tracker import RequirementTracker  # noqa: F401

logger = logging.getLogger(__name__)


def make_abstract_dist(req):
    # type: (InstallRequirement) -> DistAbstraction
    """Factory to make an abstract dist object.

    Preconditions: Either an editable req with a source_dir, or satisfied_by or
    a wheel link, or a non-editable req with a source_dir.

    :return: A concrete DistAbstraction.
    """
    if req.editable:
        return IsSDist(req)
    elif req.link and req.link.is_wheel:
        return IsWheel(req)
    else:
        return IsSDist(req)
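
# Sketch of the dispatch above (hypothetical requirement states):
# - editable requirement                  -> IsSDist
# - req.link pointing at a .whl file      -> IsWheel
# - anything else (sdists, VCS checkouts) -> IsSDist
# (Installed is constructed directly by the resolver, not by this factory.)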


class DistAbstraction(object):
    """Abstracts out the wheel vs non-wheel Resolver.resolve() logic.

    The requirements for anything installable are as follows:
     - we must be able to determine the requirement name
       (or we can't correctly handle the non-upgrade case).
     - we must be able to generate a list of run-time dependencies
       without installing any additional packages (or we would
       have to either burn time by doing temporary isolated installs
       or alternatively violate pips 'don't start installing unless
       all requirements are available' rule - neither of which are
       desirable).
     - for packages with setup requirements, we must also be able
       to determine their requirements without installing additional
       packages (for the same reason as run-time dependencies)
     - we must be able to create a Distribution object exposing the
       above metadata.
    """

    def __init__(self, req):
        # type: (InstallRequirement) -> None
        self.req = req  # type: InstallRequirement

    def dist(self):
        # type: () -> Any
        """Return a setuptools Dist object."""
        raise NotImplementedError

    def prep_for_dist(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> Any
        """Ensure that we can get a Dist for this requirement."""
        raise NotImplementedError


class IsWheel(DistAbstraction):

    def dist(self):
        # type: () -> pkg_resources.Distribution
        return list(pkg_resources.find_distributions(
            self.req.source_dir))[0]

    def prep_for_dist(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> Any
        # FIXME: https://github.com/pypa/pip/issues/1112
        pass


class IsSDist(DistAbstraction):

    def dist(self):
        return self.req.get_dist()

    def prep_for_dist(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> None
        # Prepare for building. We need to:
        #   1. Load pyproject.toml (if it exists)
        #   2. Set up the build environment

        self.req.load_pyproject_toml()
        should_isolate = self.req.use_pep517 and build_isolation

        def _raise_conflicts(conflicting_with, conflicting_reqs):
            raise InstallationError(
                "Some build dependencies for %s conflict with %s: %s." % (
                    self.req, conflicting_with, ', '.join(
                        '%s is incompatible with %s' % (installed, wanted)
                        for installed, wanted in sorted(conflicting_reqs))))

        if should_isolate:
            # Isolate in a BuildEnvironment and install the build-time
            # requirements.
            self.req.build_env = BuildEnvironment()
            self.req.build_env.install_requirements(
                finder, self.req.pyproject_requires, 'overlay',
                "Installing build dependencies"
            )
            conflicting, missing = self.req.build_env.check_requirements(
                self.req.requirements_to_check
            )
            if conflicting:
                _raise_conflicts("PEP 517/518 supported requirements",
                                 conflicting)
            if missing:
                logger.warning(
                    "Missing build requirements in pyproject.toml for %s.",
                    self.req,
                )
                logger.warning(
                    "The project does not specify a build backend, and "
                    "pip cannot fall back to setuptools without %s.",
                    " and ".join(map(repr, sorted(missing)))
                )
            # Install any extra build dependencies that the backend requests.
            # This must be done in a second pass, as the pyproject.toml
            # dependencies must be installed before we can call the backend.
            with self.req.build_env:
                # We need to have the env active when calling the hook.
                self.req.spin_message = "Getting requirements to build wheel"
                reqs = self.req.pep517_backend.get_requires_for_build_wheel()
            conflicting, missing = self.req.build_env.check_requirements(reqs)
            if conflicting:
                _raise_conflicts("the backend dependencies", conflicting)
            self.req.build_env.install_requirements(
                finder, missing, 'normal',
                "Installing backend dependencies"
            )

        self.req.prepare_metadata()
        self.req.assert_source_matches_version()


class Installed(DistAbstraction):

    def dist(self):
        # type: () -> pkg_resources.Distribution
        return self.req.satisfied_by

    def prep_for_dist(self, finder, build_isolation):
        # type: (PackageFinder, bool) -> Any
        pass


class RequirementPreparer(object):
    """Prepares a Requirement
    """

    def __init__(
        self,
        build_dir,  # type: str
        download_dir,  # type: Optional[str]
        src_dir,  # type: str
        wheel_download_dir,  # type: Optional[str]
        progress_bar,  # type: str
        build_isolation,  # type: bool
        req_tracker  # type: RequirementTracker
    ):
        # type: (...) -> None
        super(RequirementPreparer, self).__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir
        self.req_tracker = req_tracker

        # Where still packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Where still-packed .whl files should be written to. If None, they are
        # written to the download_dir parameter. Separate to download_dir to
        # permit only keeping wheel archives for pip wheel.
        if wheel_download_dir:
            wheel_download_dir = normalize_path(wheel_download_dir)
        self.wheel_download_dir = wheel_download_dir

        # NOTE
        # download_dir and wheel_download_dir overlap semantically and may
        # be combined if we're willing to have non-wheel archives present in
        # the wheelhouse output by 'pip wheel'.

        self.progress_bar = progress_bar

        # Is build isolation allowed?
        self.build_isolation = build_isolation

    @property
    def _download_should_save(self):
        # type: () -> bool
        # TODO: Modify to reduce indentation needed
        if self.download_dir:
            self.download_dir = expanduser(self.download_dir)
            if os.path.exists(self.download_dir):
                return True
            else:
                logger.critical('Could not find download directory')
                raise InstallationError(
                    "Could not find or access download directory '%s'"
                    % display_path(self.download_dir))
        return False

    def prepare_linked_requirement(
        self,
        req,  # type: InstallRequirement
        session,  # type: PipSession
        finder,  # type: PackageFinder
        upgrade_allowed,  # type: bool
        require_hashes  # type: bool
    ):
        # type: (...) -> DistAbstraction
        """Prepare a requirement that would be obtained from req.link
        """
        # TODO: Breakup into smaller functions
        if req.link and req.link.scheme == 'file':
            path = url_to_path(req.link.url)
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req)

        with indent_log():
            # @@ if filesystem packages are not marked
            # editable in a req, a non deterministic error
            # occurs when the script attempts to unpack the
            # build directory
            req.ensure_has_source_dir(self.build_dir)
            # If a checkout exists, it's unwise to keep going. Version
            # inconsistencies are logged later, but do not fail the
            # installation.
            # FIXME: this won't upgrade when there's an existing
            # package unpacked in `req.source_dir`
            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
                raise PreviousBuildDirError(
                    "pip can't proceed with requirements '%s' due to a"
                    " pre-existing build directory (%s). This is "
                    "likely due to a previous installation that failed"
                    ". pip is being responsible and not assuming it "
                    "can delete this. Please delete it and try again."
                    % (req, req.source_dir)
                )
            req.populate_link(finder, upgrade_allowed, require_hashes)

            # We can't hit this spot and have populate_link return None.
            # req.satisfied_by is None here (because we're
            # guarded) and upgrade has no impact except when satisfied_by
            # is not None.
            # Then inside find_requirement existing_applicable -> False
            # If no new versions are found, DistributionNotFound is raised,
            # otherwise a result is guaranteed.
            assert req.link
            link = req.link

            # Now that we have the real link, we can tell what kind of
            # requirements we have and raise some more informative errors
            # than otherwise. (For example, we can raise VcsHashUnsupported
            # for a VCS URL rather than HashMissing.)
            if require_hashes:
                # We could check these first 2 conditions inside
                # unpack_url and save repetition of conditions, but then
                # we would report less-useful error messages for
                # unhashable requirements, complaining that there's no
                # hash provided.
                if is_vcs_url(link):
                    raise VcsHashUnsupported()
                elif is_file_url(link) and is_dir_url(link):
                    raise DirectoryUrlHashUnsupported()
                if not req.original_link and not req.is_pinned:
                    # Unpinned packages are asking for trouble when a new
                    # version is uploaded. This isn't a security check, but
                    # it saves users a surprising hash mismatch in the
                    # future.
                    #
                    # file:/// URLs aren't pinnable, so don't complain
                    # about them not being pinned.
                    raise HashUnpinned()

            hashes = req.hashes(trust_internet=not require_hashes)
            if require_hashes and not hashes:
                # Known-good hashes are missing for this requirement, so
                # shim it with a facade object that will provoke hash
                # computation and then raise a HashMissing exception
                # showing the user what the hash should be.
                hashes = MissingHashes()

            try:
                download_dir = self.download_dir
                # We always delete unpacked sdists after pip ran.
                autodelete_unpacked = True
                if req.link.is_wheel and self.wheel_download_dir:
                    # when doing 'pip wheel' we download wheels to a
                    # dedicated dir.
                    download_dir = self.wheel_download_dir
                if req.link.is_wheel:
                    if download_dir:
                        # When downloading, we only unpack wheels to get
                        # metadata.
                        autodelete_unpacked = True
                    else:
                        # When installing a wheel, we use the unpacked
                        # wheel.
                        autodelete_unpacked = False
                unpack_url(
                    req.link, req.source_dir,
                    download_dir, autodelete_unpacked,
                    session=session, hashes=hashes,
                    progress_bar=self.progress_bar
                )
            except requests.HTTPError as exc:
                logger.critical(
                    'Could not install requirement %s because of error %s',
                    req,
                    exc,
                )
                raise InstallationError(
                    'Could not install requirement %s because of HTTP '
                    'error %s for URL %s' %
                    (req, exc, req.link)
                )
            abstract_dist = make_abstract_dist(req)
            with self.req_tracker.track(req):
                abstract_dist.prep_for_dist(finder, self.build_isolation)
            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if req.link.scheme in vcs.all_schemes:
                    req.archive(self.download_dir)
        return abstract_dist

    def prepare_editable_requirement(
        self,
        req,  # type: InstallRequirement
        require_hashes,  # type: bool
        use_user_site,  # type: bool
        finder  # type: PackageFinder
    ):
        # type: (...) -> DistAbstraction
        """Prepare an editable requirement
        """
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info('Obtaining %s', req)

        with indent_log():
            if require_hashes:
                raise InstallationError(
                    'The editable requirement %s cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.' % req
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable(not self._download_should_save)

            abstract_dist = make_abstract_dist(req)
            with self.req_tracker.track(req):
                abstract_dist.prep_for_dist(finder, self.build_isolation)

            if self._download_should_save:
                req.archive(self.download_dir)
            req.check_if_exists(use_user_site)

        return abstract_dist

    def prepare_installed_requirement(self, req, require_hashes, skip_reason):
|
|
||||||
# type: (InstallRequirement, bool, Optional[str]) -> DistAbstraction
|
|
||||||
"""Prepare an already-installed requirement
|
|
||||||
"""
|
|
||||||
assert req.satisfied_by, "req should have been satisfied but isn't"
|
|
||||||
assert skip_reason is not None, (
|
|
||||||
"did not get skip reason skipped but req.satisfied_by "
|
|
||||||
"is set to %r" % (req.satisfied_by,)
|
|
||||||
)
|
|
||||||
logger.info(
|
|
||||||
'Requirement %s: %s (%s)',
|
|
||||||
skip_reason, req, req.satisfied_by.version
|
|
||||||
)
|
|
||||||
with indent_log():
|
|
||||||
if require_hashes:
|
|
||||||
logger.debug(
|
|
||||||
'Since it is already installed, we are trusting this '
|
|
||||||
'package without checking its hash. To ensure a '
|
|
||||||
'completely repeatable environment, install into an '
|
|
||||||
'empty virtualenv.'
|
|
||||||
)
|
|
||||||
abstract_dist = Installed(req)
|
|
||||||
|
|
||||||
return abstract_dist
|
|
@@ -1,381 +0,0 @@
"""Generate and work with PEP 425 Compatibility Tags."""
|
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
import distutils.util
|
|
||||||
import logging
|
|
||||||
import platform
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
import sysconfig
|
|
||||||
import warnings
|
|
||||||
from collections import OrderedDict
|
|
||||||
|
|
||||||
import pip._internal.utils.glibc
|
|
||||||
from pip._internal.utils.compat import get_extension_suffixes
|
|
||||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
|
||||||
|
|
||||||
if MYPY_CHECK_RUNNING:
|
|
||||||
from typing import ( # noqa: F401
|
|
||||||
Tuple, Callable, List, Optional, Union, Dict
|
|
||||||
)
|
|
||||||
|
|
||||||
Pep425Tag = Tuple[str, str, str]
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
|
|
||||||
|
|
||||||
|
|
||||||
def get_config_var(var):
|
|
||||||
# type: (str) -> Optional[str]
|
|
||||||
try:
|
|
||||||
return sysconfig.get_config_var(var)
|
|
||||||
except IOError as e: # Issue #1074
|
|
||||||
warnings.warn("{}".format(e), RuntimeWarning)
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def get_abbr_impl():
|
|
||||||
# type: () -> str
|
|
||||||
"""Return abbreviated implementation name."""
|
|
||||||
if hasattr(sys, 'pypy_version_info'):
|
|
||||||
pyimpl = 'pp'
|
|
||||||
elif sys.platform.startswith('java'):
|
|
||||||
pyimpl = 'jy'
|
|
||||||
elif sys.platform == 'cli':
|
|
||||||
pyimpl = 'ip'
|
|
||||||
else:
|
|
||||||
pyimpl = 'cp'
|
|
||||||
return pyimpl
|
|
||||||
|
|
||||||
|
|
||||||
def get_impl_ver():
|
|
||||||
# type: () -> str
|
|
||||||
"""Return implementation version."""
|
|
||||||
impl_ver = get_config_var("py_version_nodot")
|
|
||||||
if not impl_ver or get_abbr_impl() == 'pp':
|
|
||||||
impl_ver = ''.join(map(str, get_impl_version_info()))
|
|
||||||
return impl_ver
|
|
||||||
|
|
||||||
|
|
||||||
def get_impl_version_info():
|
|
||||||
# type: () -> Tuple[int, ...]
|
|
||||||
"""Return sys.version_info-like tuple for use in decrementing the minor
|
|
||||||
version."""
|
|
||||||
if get_abbr_impl() == 'pp':
|
|
||||||
# as per https://github.com/pypa/pip/issues/2882
|
|
||||||
# attrs exist only on pypy
|
|
||||||
return (sys.version_info[0],
|
|
||||||
sys.pypy_version_info.major, # type: ignore
|
|
||||||
sys.pypy_version_info.minor) # type: ignore
|
|
||||||
else:
|
|
||||||
return sys.version_info[0], sys.version_info[1]
|
|
||||||
|
|
||||||
|
|
||||||
def get_impl_tag():
|
|
||||||
# type: () -> str
|
|
||||||
"""
|
|
||||||
Returns the Tag for this specific implementation.
|
|
||||||
"""
|
|
||||||
return "{}{}".format(get_abbr_impl(), get_impl_ver())
|
|
||||||
|
|
||||||
|
|
||||||
def get_flag(var, fallback, expected=True, warn=True):
|
|
||||||
# type: (str, Callable[..., bool], Union[bool, int], bool) -> bool
|
|
||||||
"""Use a fallback method for determining SOABI flags if the needed config
|
|
||||||
var is unset or unavailable."""
|
|
||||||
val = get_config_var(var)
|
|
||||||
if val is None:
|
|
||||||
if warn:
|
|
||||||
logger.debug("Config variable '%s' is unset, Python ABI tag may "
|
|
||||||
"be incorrect", var)
|
|
||||||
return fallback()
|
|
||||||
return val == expected
|
|
||||||
|
|
||||||
|
|
||||||
def get_abi_tag():
|
|
||||||
# type: () -> Optional[str]
|
|
||||||
"""Return the ABI tag based on SOABI (if available) or emulate SOABI
|
|
||||||
(CPython 2, PyPy)."""
|
|
||||||
soabi = get_config_var('SOABI')
|
|
||||||
impl = get_abbr_impl()
|
|
||||||
if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
|
|
||||||
d = ''
|
|
||||||
m = ''
|
|
||||||
u = ''
|
|
||||||
if get_flag('Py_DEBUG',
|
|
||||||
lambda: hasattr(sys, 'gettotalrefcount'),
|
|
||||||
warn=(impl == 'cp')):
|
|
||||||
d = 'd'
|
|
||||||
if get_flag('WITH_PYMALLOC',
|
|
||||||
lambda: impl == 'cp',
|
|
||||||
warn=(impl == 'cp')):
|
|
||||||
m = 'm'
|
|
||||||
if get_flag('Py_UNICODE_SIZE',
|
|
||||||
lambda: sys.maxunicode == 0x10ffff,
|
|
||||||
expected=4,
|
|
||||||
warn=(impl == 'cp' and
|
|
||||||
sys.version_info < (3, 3))) \
|
|
||||||
and sys.version_info < (3, 3):
|
|
||||||
u = 'u'
|
|
||||||
abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
|
|
||||||
elif soabi and soabi.startswith('cpython-'):
|
|
||||||
abi = 'cp' + soabi.split('-')[1]
|
|
||||||
elif soabi:
|
|
||||||
abi = soabi.replace('.', '_').replace('-', '_')
|
|
||||||
else:
|
|
||||||
abi = None
|
|
||||||
return abi
|
|
||||||
|
|
||||||
|
|
||||||
def _is_running_32bit():
|
|
||||||
# type: () -> bool
|
|
||||||
return sys.maxsize == 2147483647
|
|
||||||
|
|
||||||
|
|
||||||
def get_platform():
|
|
||||||
# type: () -> str
|
|
||||||
"""Return our platform name 'win32', 'linux_x86_64'"""
|
|
||||||
if sys.platform == 'darwin':
|
|
||||||
# distutils.util.get_platform() returns the release based on the value
|
|
||||||
# of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
|
|
||||||
# be significantly older than the user's current machine.
|
|
||||||
release, _, machine = platform.mac_ver()
|
|
||||||
split_ver = release.split('.')
|
|
||||||
|
|
||||||
if machine == "x86_64" and _is_running_32bit():
|
|
||||||
machine = "i386"
|
|
||||||
elif machine == "ppc64" and _is_running_32bit():
|
|
||||||
machine = "ppc"
|
|
||||||
|
|
||||||
return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)
|
|
||||||
|
|
||||||
# XXX remove distutils dependency
|
|
||||||
result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
|
|
||||||
if result == "linux_x86_64" and _is_running_32bit():
|
|
||||||
# 32 bit Python program (running on a 64 bit Linux): pip should only
|
|
||||||
# install and run 32 bit compiled extensions in that case.
|
|
||||||
result = "linux_i686"
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def is_manylinux1_compatible():
|
|
||||||
# type: () -> bool
|
|
||||||
# Only Linux, and only x86-64 / i686
|
|
||||||
if get_platform() not in {"linux_x86_64", "linux_i686"}:
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Check for presence of _manylinux module
|
|
||||||
try:
|
|
||||||
import _manylinux
|
|
||||||
return bool(_manylinux.manylinux1_compatible)
|
|
||||||
except (ImportError, AttributeError):
|
|
||||||
# Fall through to heuristic check below
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Check glibc version. CentOS 5 uses glibc 2.5.
|
|
||||||
return pip._internal.utils.glibc.have_compatible_glibc(2, 5)
|
|
||||||
|
|
||||||
|
|
||||||
def is_manylinux2010_compatible():
|
|
||||||
# type: () -> bool
|
|
||||||
# Only Linux, and only x86-64 / i686
|
|
||||||
if get_platform() not in {"linux_x86_64", "linux_i686"}:
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Check for presence of _manylinux module
|
|
||||||
try:
|
|
||||||
import _manylinux
|
|
||||||
return bool(_manylinux.manylinux2010_compatible)
|
|
||||||
except (ImportError, AttributeError):
|
|
||||||
# Fall through to heuristic check below
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Check glibc version. CentOS 6 uses glibc 2.12.
|
|
||||||
return pip._internal.utils.glibc.have_compatible_glibc(2, 12)
|
|
||||||
|
|
||||||
|
|
||||||
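Both manylinux checks above fall back to pip's glibc helper
(pip._internal.utils.glibc.have_compatible_glibc). As a rough, Linux-only
sketch of how such a probe can work (this is an assumption about the
helper's internals, not code from this module):

import ctypes

# Ask the already-loaded C library for its glibc version string.
# Only works on glibc systems; musl, macOS and Windows lack this symbol.
libc = ctypes.CDLL(None)
gnu_get_libc_version = libc.gnu_get_libc_version
gnu_get_libc_version.restype = ctypes.c_char_p
print(gnu_get_libc_version())  # e.g. b'2.28' (>= 2.12, so manylinux2010-ok)
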
def get_darwin_arches(major, minor, machine):
    # type: (int, int, str) -> List[str]
    """Return a list of supported arches (including group arches) for
    the given major, minor and machine architecture of a macOS machine.
    """
    arches = []

    def _supports_arch(major, minor, arch):
        # type: (int, int, str) -> bool
        # Looking at the application support for macOS versions in the chart
        # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
        # our timeline looks roughly like:
        #
        #   10.0 - Introduces ppc support.
        #   10.4 - Introduces ppc64, i386, and x86_64 support, however the
        #          ppc64 and x86_64 support is CLI only, and cannot be used
        #          for GUI applications.
        #   10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
        #   10.6 - Drops support for ppc64
        #   10.7 - Drops support for ppc
        #
        # Given that we do not know if we're installing a CLI or a GUI
        # application, we must be conservative and assume it might be a GUI
        # application and behave as if ppc64 and x86_64 support did not occur
        # until 10.5.
        #
        # Note: The above information is taken from the "Application support"
        # column in the chart not the "Processor support" since I believe
        # that we care about what instruction sets an application can use
        # not which processors the OS supports.
        if arch == 'ppc':
            return (major, minor) <= (10, 5)
        if arch == 'ppc64':
            return (major, minor) == (10, 5)
        if arch == 'i386':
            return (major, minor) >= (10, 4)
        if arch == 'x86_64':
            return (major, minor) >= (10, 5)
        if arch in groups:
            for garch in groups[arch]:
                if _supports_arch(major, minor, garch):
                    return True
        return False

    groups = OrderedDict([
        ("fat", ("i386", "ppc")),
        ("intel", ("x86_64", "i386")),
        ("fat64", ("x86_64", "ppc64")),
        ("fat32", ("x86_64", "i386", "ppc")),
    ])  # type: Dict[str, Tuple[str, ...]]

    if _supports_arch(major, minor, machine):
        arches.append(machine)

    for garch in groups:
        if machine in groups[garch] and _supports_arch(major, minor, garch):
            arches.append(garch)

    arches.append('universal')

    return arches


def get_all_minor_versions_as_strings(version_info):
    # type: (Tuple[int, ...]) -> List[str]
    versions = []
    major = version_info[:-1]
    # Support all previous minor Python versions.
    for minor in range(version_info[-1], -1, -1):
        versions.append(''.join(map(str, major + (minor,))))
    return versions


def get_supported(
    versions=None,  # type: Optional[List[str]]
    noarch=False,  # type: bool
    platform=None,  # type: Optional[str]
    impl=None,  # type: Optional[str]
    abi=None  # type: Optional[str]
):
    # type: (...) -> List[Pep425Tag]
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param platform: specify the exact platform you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abi: specify the exact abi you want valid
        tags for, or None. If None, use the local interpreter abi.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        version_info = get_impl_version_info()
        versions = get_all_minor_versions_as_strings(version_info)

    impl = impl or get_abbr_impl()

    abis = []  # type: List[str]

    abi = abi or get_abi_tag()
    if abi:
        abis[0:0] = [abi]

    abi3s = set()
    for suffix in get_extension_suffixes():
        if suffix.startswith('.abi'):
            abi3s.add(suffix.split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = platform or get_platform()
        arch_prefix, arch_sep, arch_suffix = arch.partition('_')
        if arch.startswith('macosx'):
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                tpl = '{}_{}_%i_%s'.format(name, major)
                arches = []
                for m in reversed(range(int(minor) + 1)):
                    for a in get_darwin_arches(int(major), m, actual_arch):
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        elif arch_prefix == 'manylinux2010':
            # manylinux1 wheels run on most manylinux2010 systems with the
            # exception of wheels depending on ncurses. PEP 571 states
            # manylinux1 wheels should be considered manylinux2010 wheels:
            # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
            arches = [arch, 'manylinux1' + arch_sep + arch_suffix]
        elif platform is None:
            arches = []
            if is_manylinux2010_compatible():
                arches.append('manylinux2010' + arch_sep + arch_suffix)
            if is_manylinux1_compatible():
                arches.append('manylinux1' + arch_sep + arch_suffix)
            arches.append(arch)
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(('%s%s' % (impl, versions[0]), abi, arch))

        # abi3 modules compatible with older version of Python
        for version in versions[1:]:
            # abi3 was introduced in Python 3.2
            if version in {'31', '30'}:
                break
            for abi in abi3s:   # empty set if not Python 3
                for arch in arches:
                    supported.append(("%s%s" % (impl, version), abi, arch))

        # Has binaries, does not use the Python API:
        for arch in arches:
            supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, but requires our implementation:
    supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
    # Tagged specifically as being cross-version compatible
    # (with just the major version specified)
    supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported


implementation_tag = get_impl_tag()
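For a feel of what the module above produces, here is a stand-alone sketch
that derives the leading interpreter tag the way get_impl_tag() does, using
only the stdlib (it ignores the PyPy version quirks handled above):

import sys

# 'cp' for CPython, 'pp' for PyPy -- mirrors get_abbr_impl() above.
impl = 'pp' if hasattr(sys, 'pypy_version_info') else 'cp'
print('{}{}{}'.format(impl, sys.version_info[0], sys.version_info[1]))
# e.g. 'cp37': the first component of a tag like cp37-cp37m-linux_x86_64
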
@@ -1,171 +0,0 @@
from __future__ import absolute_import

import io
import os
import sys

from pip._vendor import pytoml, six

from pip._internal.exceptions import InstallationError
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Any, Tuple, Optional, List  # noqa: F401


def _is_list_of_str(obj):
    # type: (Any) -> bool
    return (
        isinstance(obj, list) and
        all(isinstance(item, six.string_types) for item in obj)
    )


def make_pyproject_path(setup_py_dir):
    # type: (str) -> str
    path = os.path.join(setup_py_dir, 'pyproject.toml')

    # Python2 __file__ should not be unicode
    if six.PY2 and isinstance(path, six.text_type):
        path = path.encode(sys.getfilesystemencoding())

    return path


def load_pyproject_toml(
    use_pep517,  # type: Optional[bool]
    pyproject_toml,  # type: str
    setup_py,  # type: str
    req_name  # type: str
):
    # type: (...) -> Optional[Tuple[List[str], str, List[str]]]
    """Load the pyproject.toml file.

    Parameters:
        use_pep517 - Has the user requested PEP 517 processing? None
            means the user hasn't explicitly specified.
        pyproject_toml - Location of the project's pyproject.toml file
        setup_py - Location of the project's setup.py file
        req_name - The name of the requirement we're processing (for
            error reporting)

    Returns:
        None if we should use the legacy code path, otherwise a tuple
        (
            requirements from pyproject.toml,
            name of PEP 517 backend,
            requirements we should check are installed after setting
                up the build environment
        )
    """
    has_pyproject = os.path.isfile(pyproject_toml)
    has_setup = os.path.isfile(setup_py)

    if has_pyproject:
        with io.open(pyproject_toml, encoding="utf-8") as f:
            pp_toml = pytoml.load(f)
        build_system = pp_toml.get("build-system")
    else:
        build_system = None

    # The following cases must use PEP 517
    # We check for use_pep517 being non-None and falsey because that means
    # the user explicitly requested --no-use-pep517. The value 0 as
    # opposed to False can occur when the value is provided via an
    # environment variable or config file option (due to the quirk of
    # strtobool() returning an integer in pip's configuration code).
    if has_pyproject and not has_setup:
        if use_pep517 is not None and not use_pep517:
            raise InstallationError(
                "Disabling PEP 517 processing is invalid: "
                "project does not have a setup.py"
            )
        use_pep517 = True
    elif build_system and "build-backend" in build_system:
        if use_pep517 is not None and not use_pep517:
            raise InstallationError(
                "Disabling PEP 517 processing is invalid: "
                "project specifies a build backend of {} "
                "in pyproject.toml".format(
                    build_system["build-backend"]
                )
            )
        use_pep517 = True

    # If we haven't worked out whether to use PEP 517 yet,
    # and the user hasn't explicitly stated a preference,
    # we do so if the project has a pyproject.toml file.
    elif use_pep517 is None:
        use_pep517 = has_pyproject

    # At this point, we know whether we're going to use PEP 517.
    assert use_pep517 is not None

    # If we're using the legacy code path, there is nothing further
    # for us to do here.
    if not use_pep517:
        return None

    if build_system is None:
        # Either the user has a pyproject.toml with no build-system
        # section, or the user has no pyproject.toml, but has opted in
        # explicitly via --use-pep517.
        # In the absence of any explicit backend specification, we
        # assume the setuptools backend that most closely emulates the
        # traditional direct setup.py execution, and require wheel and
        # a version of setuptools that supports that backend.

        build_system = {
            "requires": ["setuptools>=40.8.0", "wheel"],
            "build-backend": "setuptools.build_meta:__legacy__",
        }

    # If we're using PEP 517, we have build system information (either
    # from pyproject.toml, or defaulted by the code above).
    # Note that at this point, we do not know if the user has actually
    # specified a backend, though.
    assert build_system is not None

    # Ensure that the build-system section in pyproject.toml conforms
    # to PEP 518.
    error_template = (
        "{package} has a pyproject.toml file that does not comply "
        "with PEP 518: {reason}"
    )

    # Specifying the build-system table but not the requires key is invalid
    if "requires" not in build_system:
        raise InstallationError(
            error_template.format(package=req_name, reason=(
                "it has a 'build-system' table but not "
                "'build-system.requires' which is mandatory in the table"
            ))
        )

    # Error out if requires is not a list of strings
    requires = build_system["requires"]
    if not _is_list_of_str(requires):
        raise InstallationError(error_template.format(
            package=req_name,
            reason="'build-system.requires' is not a list of strings.",
        ))

    backend = build_system.get("build-backend")
    check = []  # type: List[str]
    if backend is None:
        # If the user didn't specify a backend, we assume they want to use
        # the setuptools backend. But we can't be sure they have included
        # a version of setuptools which supplies the backend, or wheel
        # (which is needed by the backend) in their requirements. So we
        # make a note to check that those requirements are present once
        # we have set up the environment.
        # This is quite a lot of work to check for a very specific case. But
        # the problem is, that case is potentially quite common - projects that
        # adopted PEP 518 early for the ability to specify requirements to
        # execute setup.py, but never considered needing to mention the build
        # tools themselves. The original PEP 518 code had a similar check (but
        # implemented in a different way).
        backend = "setuptools.build_meta:__legacy__"
        check = ["setuptools>=40.8.0", "wheel"]

    return (requires, backend, check)
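The branching above is the heart of the module. A condensed, stand-alone
sketch of the same decision, with the two error cases collapsed into their
True branches (a simplification, not pip's API):

def should_use_pep517(use_pep517, has_pyproject, has_setup, has_backend):
    # No setup.py: PEP 517 processing is the only option.
    if has_pyproject and not has_setup:
        return True
    # An explicit build-backend in pyproject.toml forces PEP 517.
    if has_backend:
        return True
    # Otherwise the user's explicit choice wins; the default follows
    # the presence of pyproject.toml.
    if use_pep517 is None:
        return has_pyproject
    return bool(use_pep517)

print(should_use_pep517(None, True, True, False))    # True
print(should_use_pep517(False, False, True, False))  # False (legacy path)
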
@@ -1,77 +0,0 @@
from __future__ import absolute_import

import logging

from .req_install import InstallRequirement
from .req_set import RequirementSet
from .req_file import parse_requirements
from pip._internal.utils.logging import indent_log
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Sequence  # noqa: F401

__all__ = [
    "RequirementSet", "InstallRequirement",
    "parse_requirements", "install_given_reqs",
]

logger = logging.getLogger(__name__)


def install_given_reqs(
    to_install,  # type: List[InstallRequirement]
    install_options,  # type: List[str]
    global_options=(),  # type: Sequence[str]
    *args, **kwargs
):
    # type: (...) -> List[InstallRequirement]
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)
    """

    if to_install:
        logger.info(
            'Installing collected packages: %s',
            ', '.join([req.name for req in to_install]),
        )

    with indent_log():
        for requirement in to_install:
            if requirement.conflicts_with:
                logger.info(
                    'Found existing installation: %s',
                    requirement.conflicts_with,
                )
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(
                        auto_confirm=True
                    )
            try:
                requirement.install(
                    install_options,
                    global_options,
                    *args,
                    **kwargs
                )
            except Exception:
                should_rollback = (
                    requirement.conflicts_with and
                    not requirement.install_succeeded
                )
                # if install did not succeed, rollback previous uninstall
                if should_rollback:
                    uninstalled_pathset.rollback()
                raise
            else:
                should_commit = (
                    requirement.conflicts_with and
                    requirement.install_succeeded
                )
                if should_commit:
                    uninstalled_pathset.commit()
            requirement.remove_temporary_source()

    return to_install
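The try/except/else above is a small transactional pattern: the old version
is uninstalled into a stash first, rolled back if the new install raises,
and committed otherwise. A stand-alone sketch with a made-up stand-in for
pip's uninstall path set:

class StashedUninstall(object):
    # Stand-in for pip's UninstallPathSet: files are stashed, not deleted.
    def rollback(self):
        print('restoring the previously installed version')

    def commit(self):
        print('deleting the stashed files for good')

stashed = StashedUninstall()
try:
    raise RuntimeError('simulated install failure')
except Exception:
    stashed.rollback()  # the real code re-raises after rolling back
else:
    stashed.commit()
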
@@ -1,339 +0,0 @@
"""Backing implementation for InstallRequirement's various constructors
|
|
||||||
|
|
||||||
The idea here is that these formed a major chunk of InstallRequirement's size
|
|
||||||
so, moving them and support code dedicated to them outside of that class
|
|
||||||
helps creates for better understandability for the rest of the code.
|
|
||||||
|
|
||||||
These are meant to be used elsewhere within pip to create instances of
|
|
||||||
InstallRequirement.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
|
|
||||||
from pip._vendor.packaging.markers import Marker
|
|
||||||
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
|
|
||||||
from pip._vendor.packaging.specifiers import Specifier
|
|
||||||
from pip._vendor.pkg_resources import RequirementParseError, parse_requirements
|
|
||||||
|
|
||||||
from pip._internal.download import (
|
|
||||||
is_archive_file, is_url, path_to_url, url_to_path,
|
|
||||||
)
|
|
||||||
from pip._internal.exceptions import InstallationError
|
|
||||||
from pip._internal.models.index import PyPI, TestPyPI
|
|
||||||
from pip._internal.models.link import Link
|
|
||||||
from pip._internal.pyproject import make_pyproject_path
|
|
||||||
from pip._internal.req.req_install import InstallRequirement
|
|
||||||
from pip._internal.utils.misc import is_installable_dir
|
|
||||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
|
||||||
from pip._internal.vcs import vcs
|
|
||||||
from pip._internal.wheel import Wheel
|
|
||||||
|
|
||||||
if MYPY_CHECK_RUNNING:
|
|
||||||
from typing import ( # noqa: F401
|
|
||||||
Optional, Tuple, Set, Any, Union, Text, Dict,
|
|
||||||
)
|
|
||||||
from pip._internal.cache import WheelCache # noqa: F401
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"install_req_from_editable", "install_req_from_line",
|
|
||||||
"parse_editable"
|
|
||||||
]
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
operators = Specifier._operators.keys()
|
|
||||||
|
|
||||||
|
|
||||||
def _strip_extras(path):
|
|
||||||
# type: (str) -> Tuple[str, Optional[str]]
|
|
||||||
m = re.match(r'^(.+)(\[[^\]]+\])$', path)
|
|
||||||
extras = None
|
|
||||||
if m:
|
|
||||||
path_no_extras = m.group(1)
|
|
||||||
extras = m.group(2)
|
|
||||||
else:
|
|
||||||
path_no_extras = path
|
|
||||||
|
|
||||||
return path_no_extras, extras
|
|
||||||
|
|
||||||
|
|
||||||
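A quick stand-alone check of what _strip_extras() returns (the sample paths
are made up):

import re

def _strip_extras(path):
    # Same regex as in the module above.
    m = re.match(r'^(.+)(\[[^\]]+\])$', path)
    if m:
        return m.group(1), m.group(2)
    return path, None

print(_strip_extras('./mypkg[dev,test]'))  # ('./mypkg', '[dev,test]')
print(_strip_extras('./mypkg'))            # ('./mypkg', None)
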
def parse_editable(editable_req):
    # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]]
    """Parses an editable requirement into:
        - a requirement name
        - a URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]
    """

    url = editable_req

    # If a file path is specified with extras, strip off the extras.
    url_no_extras, extras = _strip_extras(url)

    if os.path.isdir(url_no_extras):
        if not os.path.exists(os.path.join(url_no_extras, 'setup.py')):
            msg = (
                'File "setup.py" not found. Directory cannot be installed '
                'in editable mode: {}'.format(os.path.abspath(url_no_extras))
            )
            pyproject_path = make_pyproject_path(url_no_extras)
            if os.path.isfile(pyproject_path):
                msg += (
                    '\n(A "pyproject.toml" file was found, but editable '
                    'mode currently requires a setup.py based build.)'
                )
            raise InstallationError(msg)

        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)

    if url_no_extras.lower().startswith('file:'):
        package_name = Link(url_no_extras).egg_fragment
        if extras:
            return (
                package_name,
                url_no_extras,
                Requirement("placeholder" + extras.lower()).extras,
            )
        else:
            return package_name, url_no_extras, None

    for version_control in vcs:
        if url.lower().startswith('%s:' % version_control):
            url = '%s+%s' % (version_control, url)
            break

    if '+' not in url:
        raise InstallationError(
            '%s should either be a path to a local project or a VCS url '
            'beginning with svn+, git+, hg+, or bzr+' %
            editable_req
        )

    vc_type = url.split('+', 1)[0].lower()

    if not vcs.get_backend(vc_type):
        error_message = 'For --editable=%s only ' % editable_req + \
            ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \
            ' is currently supported'
        raise InstallationError(error_message)

    package_name = Link(url).egg_fragment
    if not package_name:
        raise InstallationError(
            "Could not detect requirement name for '%s', please specify one "
            "with #egg=your_package_name" % editable_req
        )
    return package_name, url, None

def deduce_helpful_msg(req):
    # type: (str) -> str
    """Returns helpful msg in case requirements file does not exist,
    or cannot be parsed.

    :params req: Requirements file path
    """
    msg = ""
    if os.path.exists(req):
        msg = " It does exist."
        # Try to parse and check if it is a requirements file.
        try:
            with open(req, 'r') as fp:
                # parse first line only
                next(parse_requirements(fp.read()))
                msg += " The argument you provided " + \
                    "(%s) appears to be a" % (req) + \
                    " requirements file. If that is the" + \
                    " case, use the '-r' flag to install" + \
                    " the packages specified within it."
        except RequirementParseError:
            logger.debug("Cannot parse '%s' as requirements file" % (req),
                         exc_info=True)
    else:
        msg += " File '%s' does not exist." % (req)
    return msg


# ---- The actual constructors follow ----


def install_req_from_editable(
    editable_req,  # type: str
    comes_from=None,  # type: Optional[str]
    use_pep517=None,  # type: Optional[bool]
    isolated=False,  # type: bool
    options=None,  # type: Optional[Dict[str, Any]]
    wheel_cache=None,  # type: Optional[WheelCache]
    constraint=False  # type: bool
):
    # type: (...) -> InstallRequirement
    name, url, extras_override = parse_editable(editable_req)
    if url.startswith('file:'):
        source_dir = url_to_path(url)
    else:
        source_dir = None

    if name is not None:
        try:
            req = Requirement(name)
        except InvalidRequirement:
            raise InstallationError("Invalid requirement: '%s'" % name)
    else:
        req = None
    return InstallRequirement(
        req, comes_from, source_dir=source_dir,
        editable=True,
        link=Link(url),
        constraint=constraint,
        use_pep517=use_pep517,
        isolated=isolated,
        options=options if options else {},
        wheel_cache=wheel_cache,
        extras=extras_override or (),
    )


def install_req_from_line(
    name,  # type: str
    comes_from=None,  # type: Optional[Union[str, InstallRequirement]]
    use_pep517=None,  # type: Optional[bool]
    isolated=False,  # type: bool
    options=None,  # type: Optional[Dict[str, Any]]
    wheel_cache=None,  # type: Optional[WheelCache]
    constraint=False  # type: bool
):
    # type: (...) -> InstallRequirement
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.
    """
    if is_url(name):
        marker_sep = '; '
    else:
        marker_sep = ';'
    if marker_sep in name:
        name, markers_as_string = name.split(marker_sep, 1)
        markers_as_string = markers_as_string.strip()
        if not markers_as_string:
            markers = None
        else:
            markers = Marker(markers_as_string)
    else:
        markers = None
    name = name.strip()
    req_as_string = None
    path = os.path.normpath(os.path.abspath(name))
    link = None
    extras_as_string = None

    if is_url(name):
        link = Link(name)
    else:
        p, extras_as_string = _strip_extras(path)
        looks_like_dir = os.path.isdir(p) and (
            os.path.sep in name or
            (os.path.altsep is not None and os.path.altsep in name) or
            name.startswith('.')
        )
        if looks_like_dir:
            if not is_installable_dir(p):
                raise InstallationError(
                    "Directory %r is not installable. Neither 'setup.py' "
                    "nor 'pyproject.toml' found." % name
                )
            link = Link(path_to_url(p))
        elif is_archive_file(p):
            if not os.path.isfile(p):
                logger.warning(
                    'Requirement %r looks like a filename, but the '
                    'file does not exist',
                    name
                )
            link = Link(path_to_url(p))

    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == 'file' and re.search(r'\.\./', link.url):
            link = Link(
                path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            req_as_string = "%s==%s" % (wheel.name, wheel.version)
        else:
            # set the req to the egg fragment. when it's not there, this
            # will become an 'unnamed' requirement
            req_as_string = link.egg_fragment

    # a requirement specifier
    else:
        req_as_string = name

    if extras_as_string:
        extras = Requirement("placeholder" + extras_as_string.lower()).extras
    else:
        extras = ()
    if req_as_string is not None:
        try:
            req = Requirement(req_as_string)
        except InvalidRequirement:
            if os.path.sep in req_as_string:
                add_msg = "It looks like a path."
                add_msg += deduce_helpful_msg(req_as_string)
            elif ('=' in req_as_string and
                  not any(op in req_as_string for op in operators)):
                add_msg = "= is not a valid operator. Did you mean == ?"
            else:
                add_msg = ""
            raise InstallationError(
                "Invalid requirement: '%s'\n%s" % (req_as_string, add_msg)
            )
    else:
        req = None

    return InstallRequirement(
        req, comes_from, link=link, markers=markers,
        use_pep517=use_pep517, isolated=isolated,
        options=options if options else {},
        wheel_cache=wheel_cache,
        constraint=constraint,
        extras=extras,
    )


def install_req_from_req_string(
    req_string,  # type: str
    comes_from=None,  # type: Optional[InstallRequirement]
    isolated=False,  # type: bool
    wheel_cache=None,  # type: Optional[WheelCache]
    use_pep517=None  # type: Optional[bool]
):
    # type: (...) -> InstallRequirement
    try:
        req = Requirement(req_string)
    except InvalidRequirement:
        raise InstallationError("Invalid requirement: '%s'" % req_string)

    domains_not_allowed = [
        PyPI.file_storage_domain,
        TestPyPI.file_storage_domain,
    ]
    if req.url and comes_from.link.netloc in domains_not_allowed:
        # Explicitly disallow pypi packages that depend on external urls
        raise InstallationError(
            "Packages installed from PyPI cannot depend on packages "
            "which are not also hosted on PyPI.\n"
            "%s depends on %s " % (comes_from.name, req)
        )

    return InstallRequirement(
        req, comes_from, isolated=isolated, wheel_cache=wheel_cache,
        use_pep517=use_pep517
    )
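One detail worth calling out from install_req_from_line() above: URLs use
'; ' (with a space) as the marker separator so that bare ';' characters
inside a URL survive. A stand-alone sketch with a made-up requirement line:

line = 'requests>=2.0; python_version < "3.5"'
marker_sep = ';'  # would be '; ' if the line were a URL
name, markers = (part.strip() for part in line.split(marker_sep, 1))
print(name)     # requests>=2.0
print(markers)  # python_version < "3.5"
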
@@ -1,382 +0,0 @@
"""
|
|
||||||
Requirements file parsing
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
import optparse
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import shlex
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from pip._vendor.six.moves import filterfalse
|
|
||||||
from pip._vendor.six.moves.urllib import parse as urllib_parse
|
|
||||||
|
|
||||||
from pip._internal.cli import cmdoptions
|
|
||||||
from pip._internal.download import get_file_content
|
|
||||||
from pip._internal.exceptions import RequirementsFileParseError
|
|
||||||
from pip._internal.req.constructors import (
|
|
||||||
install_req_from_editable, install_req_from_line,
|
|
||||||
)
|
|
||||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
|
||||||
|
|
||||||
if MYPY_CHECK_RUNNING:
|
|
||||||
from typing import ( # noqa: F401
|
|
||||||
Iterator, Tuple, Optional, List, Callable, Text
|
|
||||||
)
|
|
||||||
from pip._internal.req import InstallRequirement # noqa: F401
|
|
||||||
from pip._internal.cache import WheelCache # noqa: F401
|
|
||||||
from pip._internal.index import PackageFinder # noqa: F401
|
|
||||||
from pip._internal.download import PipSession # noqa: F401
|
|
||||||
|
|
||||||
ReqFileLines = Iterator[Tuple[int, Text]]
|
|
||||||
|
|
||||||
__all__ = ['parse_requirements']
|
|
||||||
|
|
||||||
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
|
|
||||||
COMMENT_RE = re.compile(r'(^|\s)+#.*$')
|
|
||||||
|
|
||||||
# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
|
|
||||||
# variable name consisting of only uppercase letters, digits or the '_'
|
|
||||||
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
|
|
||||||
# 2013 Edition.
|
|
||||||
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
|
|
||||||
|
|
||||||
SUPPORTED_OPTIONS = [
|
|
||||||
cmdoptions.constraints,
|
|
||||||
cmdoptions.editable,
|
|
||||||
cmdoptions.requirements,
|
|
||||||
cmdoptions.no_index,
|
|
||||||
cmdoptions.index_url,
|
|
||||||
cmdoptions.find_links,
|
|
||||||
cmdoptions.extra_index_url,
|
|
||||||
cmdoptions.always_unzip,
|
|
||||||
cmdoptions.no_binary,
|
|
||||||
cmdoptions.only_binary,
|
|
||||||
cmdoptions.pre,
|
|
||||||
cmdoptions.trusted_host,
|
|
||||||
cmdoptions.require_hashes,
|
|
||||||
] # type: List[Callable[..., optparse.Option]]
|
|
||||||
|
|
||||||
# options to be passed to requirements
|
|
||||||
SUPPORTED_OPTIONS_REQ = [
|
|
||||||
cmdoptions.install_options,
|
|
||||||
cmdoptions.global_options,
|
|
||||||
cmdoptions.hash,
|
|
||||||
] # type: List[Callable[..., optparse.Option]]
|
|
||||||
|
|
||||||
# the 'dest' string values
|
|
||||||
SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
|
|
||||||
|
|
||||||
|
|
||||||
def parse_requirements(
|
|
||||||
filename, # type: str
|
|
||||||
finder=None, # type: Optional[PackageFinder]
|
|
||||||
comes_from=None, # type: Optional[str]
|
|
||||||
options=None, # type: Optional[optparse.Values]
|
|
||||||
session=None, # type: Optional[PipSession]
|
|
||||||
constraint=False, # type: bool
|
|
||||||
wheel_cache=None, # type: Optional[WheelCache]
|
|
||||||
use_pep517=None # type: Optional[bool]
|
|
||||||
):
|
|
||||||
# type: (...) -> Iterator[InstallRequirement]
|
|
||||||
"""Parse a requirements file and yield InstallRequirement instances.
|
|
||||||
|
|
||||||
:param filename: Path or url of requirements file.
|
|
||||||
:param finder: Instance of pip.index.PackageFinder.
|
|
||||||
:param comes_from: Origin description of requirements.
|
|
||||||
:param options: cli options.
|
|
||||||
:param session: Instance of pip.download.PipSession.
|
|
||||||
:param constraint: If true, parsing a constraint file rather than
|
|
||||||
requirements file.
|
|
||||||
:param wheel_cache: Instance of pip.wheel.WheelCache
|
|
||||||
:param use_pep517: Value of the --use-pep517 option.
|
|
||||||
"""
|
|
||||||
if session is None:
|
|
||||||
raise TypeError(
|
|
||||||
"parse_requirements() missing 1 required keyword argument: "
|
|
||||||
"'session'"
|
|
||||||
)
|
|
||||||
|
|
||||||
_, content = get_file_content(
|
|
||||||
filename, comes_from=comes_from, session=session
|
|
||||||
)
|
|
||||||
|
|
||||||
lines_enum = preprocess(content, options)
|
|
||||||
|
|
||||||
for line_number, line in lines_enum:
|
|
||||||
req_iter = process_line(line, filename, line_number, finder,
|
|
||||||
comes_from, options, session, wheel_cache,
|
|
||||||
use_pep517=use_pep517, constraint=constraint)
|
|
||||||
for req in req_iter:
|
|
||||||
yield req
|
|
||||||
|
|
||||||
|
|
||||||
def preprocess(content, options):
|
|
||||||
# type: (Text, Optional[optparse.Values]) -> ReqFileLines
|
|
||||||
"""Split, filter, and join lines, and return a line iterator
|
|
||||||
|
|
||||||
:param content: the content of the requirements file
|
|
||||||
:param options: cli options
|
|
||||||
"""
|
|
||||||
lines_enum = enumerate(content.splitlines(), start=1) # type: ReqFileLines
|
|
||||||
lines_enum = join_lines(lines_enum)
|
|
||||||
lines_enum = ignore_comments(lines_enum)
|
|
||||||
lines_enum = skip_regex(lines_enum, options)
|
|
||||||
lines_enum = expand_env_variables(lines_enum)
|
|
||||||
return lines_enum
|
|
||||||
|
|
||||||
|
|
||||||
def process_line(
|
|
||||||
line, # type: Text
|
|
||||||
filename, # type: str
|
|
||||||
line_number, # type: int
|
|
||||||
finder=None, # type: Optional[PackageFinder]
|
|
||||||
comes_from=None, # type: Optional[str]
|
|
||||||
options=None, # type: Optional[optparse.Values]
|
|
||||||
session=None, # type: Optional[PipSession]
|
|
||||||
wheel_cache=None, # type: Optional[WheelCache]
|
|
||||||
use_pep517=None, # type: Optional[bool]
|
|
||||||
constraint=False # type: bool
|
|
||||||
):
|
|
||||||
# type: (...) -> Iterator[InstallRequirement]
|
|
||||||
"""Process a single requirements line; This can result in creating/yielding
|
|
||||||
requirements, or updating the finder.
|
|
||||||
|
|
||||||
For lines that contain requirements, the only options that have an effect
|
|
||||||
are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
|
|
||||||
requirement. Other options from SUPPORTED_OPTIONS may be present, but are
|
|
||||||
ignored.
|
|
||||||
|
|
||||||
For lines that do not contain requirements, the only options that have an
|
|
||||||
effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
|
|
||||||
be present, but are ignored. These lines may contain multiple options
|
|
||||||
(although our docs imply only one is supported), and all our parsed and
|
|
||||||
affect the finder.
|
|
||||||
|
|
||||||
:param constraint: If True, parsing a constraints file.
|
|
||||||
:param options: OptionParser options that we may update
|
|
||||||
"""
|
|
||||||
    parser = build_parser(line)
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        defaults.format_control = finder.format_control
    args_str, options_str = break_args_options(line)
    # Prior to 2.7.3, shlex cannot deal with unicode entries
    if sys.version_info < (2, 7, 3):
        # https://github.com/python/mypy/issues/1174
        options_str = options_str.encode('utf8')  # type: ignore
    # https://github.com/python/mypy/issues/1174
    opts, _ = parser.parse_args(
        shlex.split(options_str), defaults)  # type: ignore

    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number,
    )

    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield install_req_from_line(
            args_str, line_comes_from, constraint=constraint,
            use_pep517=use_pep517,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )

    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        yield install_req_from_editable(
            opts.editables[0], comes_from=line_comes_from,
            use_pep517=use_pep517,
            constraint=constraint, isolated=isolated, wheel_cache=wheel_cache
        )

    # parse a nested requirements file
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parsed_reqs = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parsed_reqs:
            yield req

    # percolate hash-checking option upward
    elif opts.require_hashes:
        options.require_hashes = opts.require_hashes

    # set finder options
    elif finder:
        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.no_index is True:
            finder.index_urls = []
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
        if opts.pre:
            finder.allow_all_prereleases = True
        if opts.trusted_hosts:
            finder.secure_origins.extend(
                ("*", host, "*") for host in opts.trusted_hosts)


def break_args_options(line):
    # type: (Text) -> Tuple[str, Text]
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        if token.startswith('-') or token.startswith('--'):
            break
        else:
            args.append(token)
            options.pop(0)
    return ' '.join(args), ' '.join(options)  # type: ignore


def build_parser(line):
    # type: (Text) -> optparse.OptionParser
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
    for option_factory in option_factories:
        option = option_factory()
        parser.add_option(option)

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self, msg):
        # add offending line
        msg = 'Invalid requirement: %s\n%s' % (line, msg)
        raise RequirementsFileParseError(msg)
    # NOTE: mypy disallows assigning to a method
    # https://github.com/python/mypy/issues/2427
    parser.exit = parser_exit  # type: ignore

    return parser


def join_lines(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """Joins a line ending in '\\' with the following line (except when
    following comments). The joined line takes on the index of the first line.
    """
    primary_line_number = None
    new_line = []  # type: List[Text]
    for line_number, line in lines_enum:
        if not line.endswith('\\') or COMMENT_RE.match(line):
            if COMMENT_RE.match(line):
                # this ensures comments are always matched later
                line = ' ' + line
            if new_line:
                new_line.append(line)
                yield primary_line_number, ''.join(new_line)
                new_line = []
            else:
                yield line_number, line
        else:
            if not new_line:
                primary_line_number = line_number
            new_line.append(line.strip('\\'))

    # last line contains \
    if new_line:
        yield primary_line_number, ''.join(new_line)

    # TODO: handle space after '\'.


def ignore_comments(lines_enum):
    # type: (ReqFileLines) -> ReqFileLines
    """
    Strips comments and filters empty lines.
    """
    for line_number, line in lines_enum:
        line = COMMENT_RE.sub('', line)
        line = line.strip()
        if line:
            yield line_number, line


def skip_regex(lines_enum, options):
    # type: (ReqFileLines, Optional[optparse.Values]) -> ReqFileLines
    """
    Skip lines that match '--skip-requirements-regex' pattern

    Note: the regex pattern is only built once
    """
    skip_regex = options.skip_requirements_regex if options else None
    if skip_regex:
        pattern = re.compile(skip_regex)
        lines_enum = filterfalse(lambda e: pattern.search(e[1]), lines_enum)
    return lines_enum

def expand_env_variables(lines_enum):
|
|
||||||
# type: (ReqFileLines) -> ReqFileLines
|
|
||||||
"""Replace all environment variables that can be retrieved via `os.getenv`.
|
|
||||||
|
|
||||||
The only allowed format for environment variables defined in the
|
|
||||||
requirement file is `${MY_VARIABLE_1}` to ensure two things:
|
|
||||||
|
|
||||||
1. Strings that contain a `$` aren't accidentally (partially) expanded.
|
|
||||||
2. Ensure consistency across platforms for requirement files.
|
|
||||||
|
|
||||||
These points are the result of a discusssion on the `github pull
|
|
||||||
request #3514 <https://github.com/pypa/pip/pull/3514>`_.
|
|
||||||
|
|
||||||
Valid characters in variable names follow the `POSIX standard
|
|
||||||
<http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
|
|
||||||
to uppercase letter, digits and the `_` (underscore).
|
|
||||||
"""
|
|
||||||
for line_number, line in lines_enum:
|
|
||||||
for env_var, var_name in ENV_VAR_RE.findall(line):
|
|
||||||
value = os.getenv(var_name)
|
|
||||||
if not value:
|
|
||||||
continue
|
|
||||||
|
|
||||||
line = line.replace(env_var, value)
|
|
||||||
|
|
||||||
yield line_number, line
|
|
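A minimal runnable sketch of the ${VAR} expansion described in the docstring above. The ENV_VAR_RE used here is an assumption modelled on the documented format (uppercase letters, digits, underscores), not pip's actual constant.

import os
import re

ENV_VAR_RE = re.compile(r'(\$\{([A-Z0-9_]+)\})')  # assumed stand-in

def expand_env_variables(lines_enum):
    for line_number, line in lines_enum:
        for env_var, var_name in ENV_VAR_RE.findall(line):
            value = os.getenv(var_name)
            if not value:
                continue
            line = line.replace(env_var, value)
        yield line_number, line

os.environ['PRIVATE_TOKEN'] = 'secret'
lines = ['https://${PRIVATE_TOKEN}@example.com/simple', '${lowercase} stays']
print([line for _, line in expand_env_variables(enumerate(lines, 1))])
# -> ['https://secret@example.com/simple', '${lowercase} stays']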
File diff suppressed because it is too large
@@ -1,197 +0,0 @@
from __future__ import absolute_import

import logging
from collections import OrderedDict

from pip._internal.exceptions import InstallationError
from pip._internal.utils.logging import indent_log
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.wheel import Wheel

if MYPY_CHECK_RUNNING:
    from typing import Optional, List, Tuple, Dict, Iterable  # noqa: F401
    from pip._internal.req.req_install import InstallRequirement  # noqa: F401


logger = logging.getLogger(__name__)


class RequirementSet(object):

    def __init__(self, require_hashes=False, check_supported_wheels=True):
        # type: (bool, bool) -> None
        """Create a RequirementSet.
        """

        self.requirements = OrderedDict()  # type: Dict[str, InstallRequirement]  # noqa: E501
        self.require_hashes = require_hashes
        self.check_supported_wheels = check_supported_wheels

        # Mapping of alias: real_name
        self.requirement_aliases = {}  # type: Dict[str, str]
        self.unnamed_requirements = []  # type: List[InstallRequirement]
        self.successfully_downloaded = []  # type: List[InstallRequirement]
        self.reqs_to_cleanup = []  # type: List[InstallRequirement]

    def __str__(self):
        reqs = [req for req in self.requirements.values()
                if not req.comes_from]
        reqs.sort(key=lambda req: req.name.lower())
        return ' '.join([str(req.req) for req in reqs])

    def __repr__(self):
        reqs = [req for req in self.requirements.values()]
        reqs.sort(key=lambda req: req.name.lower())
        reqs_str = ', '.join([str(req.req) for req in reqs])
        return ('<%s object; %d requirement(s): %s>'
                % (self.__class__.__name__, len(reqs), reqs_str))

    def add_requirement(
        self,
        install_req,  # type: InstallRequirement
        parent_req_name=None,  # type: Optional[str]
        extras_requested=None  # type: Optional[Iterable[str]]
    ):
        # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]  # noqa: E501
        """Add install_req as a requirement to install.

        :param parent_req_name: The name of the requirement that needed this
            added. The name is used because when multiple unnamed requirements
            resolve to the same name, we could otherwise end up with dependency
            links that point outside the Requirements set. parent_req must
            already be added. Note that None implies that this is a user
            supplied requirement, vs an inferred one.
        :param extras_requested: an iterable of extras used to evaluate the
            environment markers.
        :return: Additional requirements to scan. That is either [] if
            the requirement is not applicable, or [install_req] if the
            requirement is applicable and has just been added.
        """
        name = install_req.name

        # If the markers do not match, ignore this requirement.
        if not install_req.match_markers(extras_requested):
            logger.info(
                "Ignoring %s: markers '%s' don't match your environment",
                name, install_req.markers,
            )
            return [], None

        # If the wheel is not supported, raise an error.
        # Should check this after filtering out based on environment markers to
        # allow specifying different wheels based on the environment/OS, in a
        # single requirements file.
        if install_req.link and install_req.link.is_wheel:
            wheel = Wheel(install_req.link.filename)
            if self.check_supported_wheels and not wheel.supported():
                raise InstallationError(
                    "%s is not a supported wheel on this platform." %
                    wheel.filename
                )

        # This next bit is really a sanity check.
        assert install_req.is_direct == (parent_req_name is None), (
            "a direct req shouldn't have a parent and also, "
            "a non direct req should have a parent"
        )

        # Unnamed requirements are scanned again and the requirement won't be
        # added as a dependency until after scanning.
        if not name:
            # url or path requirement w/o an egg fragment
            self.unnamed_requirements.append(install_req)
            return [install_req], None

        try:
            existing_req = self.get_requirement(name)
        except KeyError:
            existing_req = None

        has_conflicting_requirement = (
            parent_req_name is None and
            existing_req and
            not existing_req.constraint and
            existing_req.extras == install_req.extras and
            existing_req.req.specifier != install_req.req.specifier
        )
        if has_conflicting_requirement:
            raise InstallationError(
                "Double requirement given: %s (already in %s, name=%r)"
                % (install_req, existing_req, name)
            )

        # When no existing requirement exists, add the requirement as a
        # dependency and it will be scanned again after.
        if not existing_req:
            self.requirements[name] = install_req
            # FIXME: what about other normalizations? E.g., _ vs. -?
            if name.lower() != name:
                self.requirement_aliases[name.lower()] = name
            # We'd want to rescan this requirement later
            return [install_req], install_req

        # Assume there's no need to scan, and that we've already
        # encountered this for scanning.
        if install_req.constraint or not existing_req.constraint:
            return [], existing_req

        does_not_satisfy_constraint = (
            install_req.link and
            not (
                existing_req.link and
                install_req.link.path == existing_req.link.path
            )
        )
        if does_not_satisfy_constraint:
            self.reqs_to_cleanup.append(install_req)
            raise InstallationError(
                "Could not satisfy constraints for '%s': "
                "installation from path or url cannot be "
                "constrained to a version" % name,
            )
        # If we're now installing a constraint, mark the existing
        # object for real installation.
        existing_req.constraint = False
        existing_req.extras = tuple(sorted(
            set(existing_req.extras) | set(install_req.extras)
        ))
        logger.debug(
            "Setting %s extras to: %s",
            existing_req, existing_req.extras,
        )
        # Return the existing requirement for addition to the parent and
        # scanning again.
        return [existing_req], existing_req

    def has_requirement(self, project_name):
        # type: (str) -> bool
        name = project_name.lower()
        if (name in self.requirements and
                not self.requirements[name].constraint or
                name in self.requirement_aliases and
                not self.requirements[self.requirement_aliases[name]].constraint):
            return True
        return False

    @property
    def has_requirements(self):
        # type: () -> List[InstallRequirement]
        return list(req for req in self.requirements.values() if not
                    req.constraint) or self.unnamed_requirements

    def get_requirement(self, project_name):
        # type: (str) -> InstallRequirement
        for name in project_name, project_name.lower():
            if name in self.requirements:
                return self.requirements[name]
            if name in self.requirement_aliases:
                return self.requirements[self.requirement_aliases[name]]
        raise KeyError("No project with the name %r" % project_name)

    def cleanup_files(self):
        # type: () -> None
        """Clean up files, remove builds."""
        logger.debug('Cleaning up...')
        with indent_log():
            for req in self.reqs_to_cleanup:
                req.remove_temporary_source()
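The "double requirement" rule enforced above can be illustrated with a stripped-down model. This is an illustrative sketch only, using plain name/specifier strings instead of InstallRequirement objects.

class TinyRequirementSet:
    def __init__(self):
        self.requirements = {}  # name -> specifier

    def add_requirement(self, name, specifier, parent_req_name=None):
        existing = self.requirements.get(name)
        # Two conflicting *user-supplied* specs for one project is an error...
        if (parent_req_name is None and existing is not None
                and existing != specifier):
            raise ValueError('Double requirement given: %s%s (already in %s%s)'
                             % (name, specifier, name, existing))
        if existing is None:
            self.requirements[name] = specifier
            return True  # newly added; would be scanned again
        # ...but a later sub-dependency quietly loses: first found, wins.
        return False

reqs = TinyRequirementSet()
reqs.add_requirement('requests', '>=2.0')
reqs.add_requirement('requests', '>=2.18', parent_req_name='myapp')  # ignored
print(reqs.requirements)  # -> {'requests': '>=2.0'}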
@@ -1,88 +0,0 @@
from __future__ import absolute_import

import contextlib
import errno
import hashlib
import logging
import os

from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Set, Iterator  # noqa: F401
    from pip._internal.req.req_install import InstallRequirement  # noqa: F401
    from pip._internal.models.link import Link  # noqa: F401

logger = logging.getLogger(__name__)


class RequirementTracker(object):

    def __init__(self):
        # type: () -> None
        self._root = os.environ.get('PIP_REQ_TRACKER')
        if self._root is None:
            self._temp_dir = TempDirectory(delete=False, kind='req-tracker')
            self._temp_dir.create()
            self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path
            logger.debug('Created requirements tracker %r', self._root)
        else:
            self._temp_dir = None
            logger.debug('Re-using requirements tracker %r', self._root)
        self._entries = set()  # type: Set[InstallRequirement]

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.cleanup()

    def _entry_path(self, link):
        # type: (Link) -> str
        hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
        return os.path.join(self._root, hashed)

    def add(self, req):
        # type: (InstallRequirement) -> None
        link = req.link
        info = str(req)
        entry_path = self._entry_path(link)
        try:
            with open(entry_path) as fp:
                # Error: there's already a build in progress.
                raise LookupError('%s is already being built: %s'
                                  % (link, fp.read()))
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
        assert req not in self._entries
        with open(entry_path, 'w') as fp:
            fp.write(info)
        self._entries.add(req)
        logger.debug('Added %s to build tracker %r', req, self._root)

    def remove(self, req):
        # type: (InstallRequirement) -> None
        link = req.link
        self._entries.remove(req)
        os.unlink(self._entry_path(link))
        logger.debug('Removed %s from build tracker %r', req, self._root)

    def cleanup(self):
        # type: () -> None
        for req in set(self._entries):
            self.remove(req)
        remove = self._temp_dir is not None
        if remove:
            self._temp_dir.cleanup()
        logger.debug('%s build tracker %r',
                     'Removed' if remove else 'Cleaned',
                     self._root)

    @contextlib.contextmanager
    def track(self, req):
        # type: (InstallRequirement) -> Iterator[None]
        self.add(req)
        yield
        self.remove(req)
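A minimal, self-contained sketch of the marker-file pattern used above: one file per build, named after a hash of the thing being built, whose existence signals "a build is already in progress". Names here are hypothetical; only the pattern mirrors the tracker.

import contextlib
import errno
import hashlib
import os
import tempfile

TRACKER_ROOT = tempfile.mkdtemp(prefix='tiny-tracker-')

def _entry_path(key):
    return os.path.join(TRACKER_ROOT, hashlib.sha224(key.encode()).hexdigest())

@contextlib.contextmanager
def track(key):
    path = _entry_path(key)
    try:
        with open(path):
            raise LookupError('%s is already being built' % key)
    except IOError as e:
        if e.errno != errno.ENOENT:  # only "file missing" is expected
            raise
    with open(path, 'w') as fp:
        fp.write(key)
    try:
        yield
    finally:
        os.unlink(path)

with track('https://example.com/pkg-1.0.tar.gz'):
    pass  # build happens here; a nested track() of the same URL raises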
@@ -1,596 +0,0 @@
from __future__ import absolute_import

import csv
import functools
import logging
import os
import sys
import sysconfig

from pip._vendor import pkg_resources

from pip._internal.exceptions import UninstallationError
from pip._internal.locations import bin_py, bin_user
from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    FakeFile, ask, dist_in_usersite, dist_is_local, egg_link_path, is_local,
    normalize_path, renames, rmtree,
)
from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory

logger = logging.getLogger(__name__)


def _script_names(dist, script_name, is_gui):
    """Create the fully qualified name of the files created by
    {console,gui}_scripts for the given ``dist``.
    Returns the list of file names
    """
    if dist_in_usersite(dist):
        bin_dir = bin_user
    else:
        bin_dir = bin_py
    exe_name = os.path.join(bin_dir, script_name)
    paths_to_remove = [exe_name]
    if WINDOWS:
        paths_to_remove.append(exe_name + '.exe')
        paths_to_remove.append(exe_name + '.exe.manifest')
        if is_gui:
            paths_to_remove.append(exe_name + '-script.pyw')
        else:
            paths_to_remove.append(exe_name + '-script.py')
    return paths_to_remove


def _unique(fn):
    @functools.wraps(fn)
    def unique(*args, **kw):
        seen = set()
        for item in fn(*args, **kw):
            if item not in seen:
                seen.add(item)
                yield item
    return unique


@_unique
def uninstallation_paths(dist):
    """
    Yield all the uninstallation paths for dist based on RECORD-without-.py[co]

    Yield paths to all the files in RECORD. For each .py file in RECORD, add
    the .pyc and .pyo in the same directory.

    UninstallPathSet.add() takes care of the __pycache__ .py[co].
    """
    r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
    for row in r:
        path = os.path.join(dist.location, row[0])
        yield path
        if path.endswith('.py'):
            dn, fn = os.path.split(path)
            base = fn[:-3]
            path = os.path.join(dn, base + '.pyc')
            yield path
            path = os.path.join(dn, base + '.pyo')
            yield path


def compact(paths):
    """Compact a path set to contain the minimal number of paths
    necessary to contain all paths in the set. If /a/path/ and
    /a/path/to/a/file.txt are both in the set, leave only the
    shorter path."""

    sep = os.path.sep
    short_paths = set()
    for path in sorted(paths, key=len):
        should_skip = any(
            path.startswith(shortpath.rstrip("*")) and
            path[len(shortpath.rstrip("*").rstrip(sep))] == sep
            for shortpath in short_paths
        )
        if not should_skip:
            short_paths.add(path)
    return short_paths
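A quick demonstration of compact() on a toy path set; the function is copied here verbatim so the snippet runs on its own. Note that a path sharing only a string prefix (not a directory boundary) is kept.

import os

def compact(paths):
    sep = os.path.sep
    short_paths = set()
    for path in sorted(paths, key=len):
        should_skip = any(
            path.startswith(shortpath.rstrip("*")) and
            path[len(shortpath.rstrip("*").rstrip(sep))] == sep
            for shortpath in short_paths
        )
        if not should_skip:
            short_paths.add(path)
    return short_paths

paths = {
    '/a/path',
    '/a/path/to/a/file.txt',   # under /a/path, so dropped
    '/a/pathological',         # shares a prefix but is not under /a/path
}
print(sorted(compact(paths)))
# -> ['/a/path', '/a/pathological']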
def compress_for_rename(paths):
    """Returns a set containing the paths that need to be renamed.

    This set may include directories when the original sequence of paths
    included every file on disk.
    """
    case_map = dict((os.path.normcase(p), p) for p in paths)
    remaining = set(case_map)
    unchecked = sorted(set(os.path.split(p)[0]
                           for p in case_map.values()), key=len)
    wildcards = set()

    def norm_join(*a):
        return os.path.normcase(os.path.join(*a))

    for root in unchecked:
        if any(os.path.normcase(root).startswith(w)
               for w in wildcards):
            # This directory has already been handled.
            continue

        all_files = set()
        all_subdirs = set()
        for dirname, subdirs, files in os.walk(root):
            all_subdirs.update(norm_join(root, dirname, d)
                               for d in subdirs)
            all_files.update(norm_join(root, dirname, f)
                             for f in files)
        # If all the files we found are in our remaining set of files to
        # remove, then remove them from the latter set and add a wildcard
        # for the directory.
        if not (all_files - remaining):
            remaining.difference_update(all_files)
            wildcards.add(root + os.sep)

    return set(map(case_map.__getitem__, remaining)) | wildcards


def compress_for_output_listing(paths):
    """Returns a tuple of 2 sets of which paths to display to the user

    The first set contains paths that would be deleted. Files of a package
    are not added and the top-level directory of the package has a '*' added
    at the end - to signify that all its contents are removed.

    The second set contains files that would have been skipped in the above
    folders.
    """

    will_remove = list(paths)
    will_skip = set()

    # Determine folders and files
    folders = set()
    files = set()
    for path in will_remove:
        if path.endswith(".pyc"):
            continue
        if path.endswith("__init__.py") or ".dist-info" in path:
            folders.add(os.path.dirname(path))
        files.add(path)

    _normcased_files = set(map(os.path.normcase, files))

    folders = compact(folders)

    # This walks the tree using os.walk to not miss extra folders
    # that might get added.
    for folder in folders:
        for dirpath, _, dirfiles in os.walk(folder):
            for fname in dirfiles:
                if fname.endswith(".pyc"):
                    continue

                file_ = os.path.join(dirpath, fname)
                if (os.path.isfile(file_) and
                        os.path.normcase(file_) not in _normcased_files):
                    # We are skipping this file. Add it to the set.
                    will_skip.add(file_)

    will_remove = files | {
        os.path.join(folder, "*") for folder in folders
    }

    return will_remove, will_skip


class StashedUninstallPathSet(object):
    """A set of file rename operations to stash files while
    tentatively uninstalling them."""
    def __init__(self):
        # Mapping from source file root to [Adjacent]TempDirectory
        # for files under that directory.
        self._save_dirs = {}
        # (old path, new path) tuples for each move that may need
        # to be undone.
        self._moves = []

    def _get_directory_stash(self, path):
        """Stashes a directory.

        Directories are stashed adjacent to their original location if
        possible, or else moved/copied into the user's temp dir."""

        try:
            save_dir = AdjacentTempDirectory(path)
            save_dir.create()
        except OSError:
            save_dir = TempDirectory(kind="uninstall")
            save_dir.create()
        self._save_dirs[os.path.normcase(path)] = save_dir

        return save_dir.path

    def _get_file_stash(self, path):
        """Stashes a file.

        If no root has been provided, one will be created for the directory
        in the user's temp directory."""
        path = os.path.normcase(path)
        head, old_head = os.path.dirname(path), None
        save_dir = None

        while head != old_head:
            try:
                save_dir = self._save_dirs[head]
                break
            except KeyError:
                pass
            head, old_head = os.path.dirname(head), head
        else:
            # Did not find any suitable root
            head = os.path.dirname(path)
            save_dir = TempDirectory(kind='uninstall')
            save_dir.create()
            self._save_dirs[head] = save_dir

        relpath = os.path.relpath(path, head)
        if relpath and relpath != os.path.curdir:
            return os.path.join(save_dir.path, relpath)
        return save_dir.path

    def stash(self, path):
        """Stashes the directory or file and returns its new location.
        """
        if os.path.isdir(path):
            new_path = self._get_directory_stash(path)
        else:
            new_path = self._get_file_stash(path)

        self._moves.append((path, new_path))
        if os.path.isdir(path) and os.path.isdir(new_path):
            # If we're moving a directory, we need to
            # remove the destination first or else it will be
            # moved to inside the existing directory.
            # We just created new_path ourselves, so it will
            # be removable.
            os.rmdir(new_path)
        renames(path, new_path)
        return new_path

    def commit(self):
        """Commits the uninstall by removing stashed files."""
        for _, save_dir in self._save_dirs.items():
            save_dir.cleanup()
        self._moves = []
        self._save_dirs = {}

    def rollback(self):
        """Undoes the uninstall by moving stashed files back."""
        for p in self._moves:
            logging.info("Moving to %s\n from %s", *p)

        for new_path, path in self._moves:
            try:
                logger.debug('Replacing %s from %s', new_path, path)
                if os.path.isfile(new_path):
                    os.unlink(new_path)
                elif os.path.isdir(new_path):
                    rmtree(new_path)
                renames(path, new_path)
            except OSError as ex:
                logger.error("Failed to restore %s", new_path)
                logger.debug("Exception: %s", ex)

        self.commit()

    @property
    def can_rollback(self):
        return bool(self._moves)


class UninstallPathSet(object):
    """A set of file paths to be removed in the uninstallation of a
    requirement."""
    def __init__(self, dist):
        self.paths = set()
        self._refuse = set()
        self.pth = {}
        self.dist = dist
        self._moved_paths = StashedUninstallPathSet()

    def _permitted(self, path):
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        return is_local(path)

    def add(self, path):
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(normalize_path(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self.paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == '.py' and uses_pycache:
            self.add(cache_from_source(path))

    def add_pth(self, pth_file, entry):
        pth_file = normalize_path(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self.pth:
                self.pth[pth_file] = UninstallPthEntries(pth_file)
            self.pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def remove(self, auto_confirm=False, verbose=False):
        """Remove paths in ``self.paths`` with confirmation (unless
        ``auto_confirm`` is True)."""

        if not self.paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self.dist.project_name,
            )
            return

        dist_name_version = (
            self.dist.project_name + "-" + self.dist.version
        )
        logger.info('Uninstalling %s:', dist_name_version)

        with indent_log():
            if auto_confirm or self._allowed_to_proceed(verbose):
                moved = self._moved_paths

                for_rename = compress_for_rename(self.paths)

                for path in sorted(compact(for_rename)):
                    moved.stash(path)
                    logger.debug('Removing file or directory %s', path)

                for pth in self.pth.values():
                    pth.remove()

                logger.info('Successfully uninstalled %s', dist_name_version)

    def _allowed_to_proceed(self, verbose):
        """Display which files would be deleted and prompt for confirmation
        """

        def _display(msg, paths):
            if not paths:
                return

            logger.info(msg)
            with indent_log():
                for path in sorted(compact(paths)):
                    logger.info(path)

        if not verbose:
            will_remove, will_skip = compress_for_output_listing(self.paths)
        else:
            # In verbose mode, display all the files that are going to be
            # deleted.
            will_remove = list(self.paths)
            will_skip = set()

        _display('Would remove:', will_remove)
        _display('Would not remove (might be manually added):', will_skip)
        _display('Would not remove (outside of prefix):', self._refuse)
        if verbose:
            _display('Will actually move:', compress_for_rename(self.paths))

        return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'

    def rollback(self):
        """Rollback the changes previously made by remove()."""
        if not self._moved_paths.can_rollback:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self.dist.project_name,
            )
            return False
        logger.info('Rolling back uninstall of %s', self.dist.project_name)
        self._moved_paths.rollback()
        for pth in self.pth.values():
            pth.rollback()

    def commit(self):
        """Remove temporary save dir: rollback will no longer be possible."""
        self._moved_paths.commit()

    @classmethod
    def from_dist(cls, dist):
        dist_path = normalize_path(dist.location)
        if not dist_is_local(dist):
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.key,
                dist_path,
                sys.prefix,
            )
            return cls(dist)

        if dist_path in {p for p in {sysconfig.get_path("stdlib"),
                                     sysconfig.get_path("platstdlib")}
                         if p}:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.key,
                dist_path,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path(dist)
        develop_egg_link_egg_info = '{}.egg-info'.format(
            pkg_resources.to_filename(dist.project_name))
        egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
        # Special case for distutils installed package
        distutils_egg_info = getattr(dist._provider, 'path', None)

        # The order of the uninstall cases matters: with two installs of the
        # same package, pip needs to uninstall the currently detected version
        if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
                not dist.egg_info.endswith(develop_egg_link_egg_info)):
            # if dist.egg_info.endswith(develop_egg_link_egg_info), we
            # are in fact in the develop_egg_link case
            paths_to_remove.add(dist.egg_info)
            if dist.has_metadata('installed-files.txt'):
                for installed_file in dist.get_metadata(
                        'installed-files.txt').splitlines():
                    path = os.path.normpath(
                        os.path.join(dist.egg_info, installed_file)
                    )
                    paths_to_remove.add(path)
            # FIXME: need a test for this elif block
            # occurs with --single-version-externally-managed/--record outside
            # of pip
            elif dist.has_metadata('top_level.txt'):
                if dist.has_metadata('namespace_packages.txt'):
                    namespaces = dist.get_metadata('namespace_packages.txt')
                else:
                    namespaces = []
                for top_level_pkg in [
                        p for p
                        in dist.get_metadata('top_level.txt').splitlines()
                        if p and p not in namespaces]:
                    path = os.path.join(dist.location, top_level_pkg)
                    paths_to_remove.add(path)
                    paths_to_remove.add(path + '.py')
                    paths_to_remove.add(path + '.pyc')
                    paths_to_remove.add(path + '.pyo')

        elif distutils_egg_info:
            raise UninstallationError(
                "Cannot uninstall {!r}. It is a distutils installed project "
                "and thus we cannot accurately determine which files belong "
                "to it which would lead to only a partial uninstall.".format(
                    dist.project_name,
                )
            )

        elif dist.location.endswith('.egg'):
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist.location)
            easy_install_egg = os.path.split(dist.location)[1]
            easy_install_pth = os.path.join(os.path.dirname(dist.location),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)

        elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # develop egg
            with open(develop_egg_link, 'r') as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
            assert (link_pointer == dist.location), (
                'Egg-link %s does not match installed location of %s '
                '(at %s)' % (link_pointer, dist.project_name, dist.location)
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
                                            'easy-install.pth')
            paths_to_remove.add_pth(easy_install_pth, dist.location)

        else:
            logger.debug(
                'Not sure how to uninstall: %s - Check: %s',
                dist, dist.location,
            )

        # find distutils scripts= scripts
        if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
            for script in dist.metadata_listdir('scripts'):
                if dist_in_usersite(dist):
                    bin_dir = bin_user
                else:
                    bin_dir = bin_py
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')

        # find console_scripts
        _scripts_to_remove = []
        console_scripts = dist.get_entry_map(group='console_scripts')
        for name in console_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, False))
        # find gui_scripts
        gui_scripts = dist.get_entry_map(group='gui_scripts')
        for name in gui_scripts.keys():
            _scripts_to_remove.extend(_script_names(dist, name, True))

        for s in _scripts_to_remove:
            paths_to_remove.add(s)

        return paths_to_remove


class UninstallPthEntries(object):
    def __init__(self, pth_file):
        if not os.path.isfile(pth_file):
            raise UninstallationError(
                "Cannot remove entries from nonexistent file %s" % pth_file
            )
        self.file = pth_file
        self.entries = set()
        self._saved_lines = None

    def add(self, entry):
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace('\\', '/')
        self.entries.add(entry)

    def remove(self):
        logger.debug('Removing pth entries from %s:', self.file)
        with open(self.file, 'rb') as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        if any(b'\r\n' in line for line in lines):
            endline = '\r\n'
        else:
            endline = '\n'
        # handle missing trailing newline
        if lines and not lines[-1].endswith(endline.encode("utf-8")):
            lines[-1] = lines[-1] + endline.encode("utf-8")
        for entry in self.entries:
            try:
                logger.debug('Removing entry: %s', entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                pass
        with open(self.file, 'wb') as fh:
            fh.writelines(lines)

    def rollback(self):
        if self._saved_lines is None:
            logger.error(
                'Cannot roll back changes to %s, none were made', self.file
            )
            return False
        logger.debug('Rolling %s back to previous state', self.file)
        with open(self.file, 'wb') as fh:
            fh.writelines(self._saved_lines)
        return True
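A minimal sketch of the .pth edit-with-rollback pattern above: read the file as bytes, keep the original lines so the change can be undone, then write back everything except the entries being removed. Paths and entry names here are made up for illustration.

import os
import tempfile

pth = os.path.join(tempfile.mkdtemp(), 'easy-install.pth')
with open(pth, 'wb') as fh:
    fh.write(b'./demo-1.0.egg\n./other-2.0.egg\n')

with open(pth, 'rb') as fh:
    saved_lines = fh.readlines()       # snapshot kept for rollback

lines = list(saved_lines)
lines.remove(b'./demo-1.0.egg\n')      # drop the uninstalled entry
with open(pth, 'wb') as fh:
    fh.writelines(lines)

# rollback: restore the snapshot taken before the edit
with open(pth, 'wb') as fh:
    fh.writelines(saved_lines)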
@@ -1,393 +0,0 @@
"""Dependency Resolution

The dependency resolution in pip is performed as follows:

for top-level requirements:
    a. only one spec allowed per project, regardless of conflicts or not.
       otherwise a "double requirement" exception is raised
    b. they override sub-dependency requirements.
for sub-dependencies
    a. "first found, wins" (where the order is breadth first)
"""

import logging
from collections import defaultdict
from itertools import chain

from pip._internal.exceptions import (
    BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors,
    UnsupportedPythonVersion,
)
from pip._internal.req.constructors import install_req_from_req_string
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import dist_in_usersite, ensure_dir
from pip._internal.utils.packaging import check_dist_requires_python
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional, DefaultDict, List, Set  # noqa: F401
    from pip._internal.download import PipSession  # noqa: F401
    from pip._internal.req.req_install import InstallRequirement  # noqa: F401
    from pip._internal.index import PackageFinder  # noqa: F401
    from pip._internal.req.req_set import RequirementSet  # noqa: F401
    from pip._internal.operations.prepare import (  # noqa: F401
        DistAbstraction, RequirementPreparer
    )
    from pip._internal.cache import WheelCache  # noqa: F401

logger = logging.getLogger(__name__)


class Resolver(object):
    """Resolves which packages need to be installed/uninstalled to perform \
    the requested operation without breaking the requirements of any package.
    """

    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}

    def __init__(
        self,
        preparer,  # type: RequirementPreparer
        session,  # type: PipSession
        finder,  # type: PackageFinder
        wheel_cache,  # type: Optional[WheelCache]
        use_user_site,  # type: bool
        ignore_dependencies,  # type: bool
        ignore_installed,  # type: bool
        ignore_requires_python,  # type: bool
        force_reinstall,  # type: bool
        isolated,  # type: bool
        upgrade_strategy,  # type: str
        use_pep517=None  # type: Optional[bool]
    ):
        # type: (...) -> None
        super(Resolver, self).__init__()
        assert upgrade_strategy in self._allowed_strategies

        self.preparer = preparer
        self.finder = finder
        self.session = session

        # NOTE: This would eventually be replaced with a cache that can give
        #       information about both sdist and wheels transparently.
        self.wheel_cache = wheel_cache

        # This is set in resolve
        self.require_hashes = None  # type: Optional[bool]

        self.upgrade_strategy = upgrade_strategy
        self.force_reinstall = force_reinstall
        self.isolated = isolated
        self.ignore_dependencies = ignore_dependencies
        self.ignore_installed = ignore_installed
        self.ignore_requires_python = ignore_requires_python
        self.use_user_site = use_user_site
        self.use_pep517 = use_pep517

        self._discovered_dependencies = \
            defaultdict(list)  # type: DefaultDict[str, List]

    def resolve(self, requirement_set):
        # type: (RequirementSet) -> None
        """Resolve what operations need to be done

        As a side-effect of this method, the packages (and their dependencies)
        are downloaded, unpacked and prepared for installation. This
        preparation is done by ``pip.operations.prepare``.

        Once PyPI has static dependency metadata available, it would be
        possible to move the preparation to become a step separated from
        dependency resolution.
        """
        # make the wheelhouse
        if self.preparer.wheel_download_dir:
            ensure_dir(self.preparer.wheel_download_dir)

        # If any top-level requirement has a hash specified, enter
        # hash-checking mode, which requires hashes from all.
        root_reqs = (
            requirement_set.unnamed_requirements +
            list(requirement_set.requirements.values())
        )
        self.require_hashes = (
            requirement_set.require_hashes or
            any(req.has_hash_options for req in root_reqs)
        )

        # Display where finder is looking for packages
        locations = self.finder.get_formatted_locations()
        if locations:
            logger.info(locations)

        # Actually prepare the files, and collect any exceptions. Most hash
        # exceptions cannot be checked ahead of time, because
        # req.populate_link() needs to be called before we can make decisions
        # based on link type.
        discovered_reqs = []  # type: List[InstallRequirement]
        hash_errors = HashErrors()
        for req in chain(root_reqs, discovered_reqs):
            try:
                discovered_reqs.extend(
                    self._resolve_one(requirement_set, req)
                )
            except HashError as exc:
                exc.req = req
                hash_errors.append(exc)

        if hash_errors:
            raise hash_errors

    def _is_upgrade_allowed(self, req):
        # type: (InstallRequirement) -> bool
        if self.upgrade_strategy == "to-satisfy-only":
            return False
        elif self.upgrade_strategy == "eager":
            return True
        else:
            assert self.upgrade_strategy == "only-if-needed"
            return req.is_direct

    def _set_req_to_reinstall(self, req):
        # type: (InstallRequirement) -> None
        """
        Set a requirement to be installed.
        """
        # Don't uninstall the conflict if doing a user install and the
        # conflict is not a user install.
        if not self.use_user_site or dist_in_usersite(req.satisfied_by):
            req.conflicts_with = req.satisfied_by
        req.satisfied_by = None

    # XXX: Stop passing requirement_set for options
    def _check_skip_installed(self, req_to_install):
        # type: (InstallRequirement) -> Optional[str]
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        if self.ignore_installed:
            return None

        req_to_install.check_if_exists(self.use_user_site)
        if not req_to_install.satisfied_by:
            return None

        if self.force_reinstall:
            self._set_req_to_reinstall(req_to_install)
            return None

        if not self._is_upgrade_allowed(req_to_install):
            if self.upgrade_strategy == "only-if-needed":
                return 'already satisfied, skipping upgrade'
            return 'already satisfied'

        # Check for the possibility of an upgrade. For link-based
        # requirements we have to pull the tree down and inspect to assess
        # the version #, so it's handled way down.
        if not req_to_install.link:
            try:
                self.finder.find_requirement(req_to_install, upgrade=True)
            except BestVersionAlreadyInstalled:
                # Then the best version is installed.
                return 'already up-to-date'
            except DistributionNotFound:
                # No distribution found, so we squash the error. It will
                # be raised later when we re-try to do the install.
                # Why don't we just raise here?
                pass

        self._set_req_to_reinstall(req_to_install)
        return None

    def _get_abstract_dist_for(self, req):
        # type: (InstallRequirement) -> DistAbstraction
        """Takes an InstallRequirement and returns a single AbstractDist \
        representing a prepared variant of the same.
        """
        assert self.require_hashes is not None, (
            "require_hashes should have been set in Resolver.resolve()"
        )

        if req.editable:
            return self.preparer.prepare_editable_requirement(
                req, self.require_hashes, self.use_user_site, self.finder,
            )

        # satisfied_by is only evaluated by calling _check_skip_installed,
        # so it must be None here.
        assert req.satisfied_by is None
        skip_reason = self._check_skip_installed(req)

        if req.satisfied_by:
            return self.preparer.prepare_installed_requirement(
                req, self.require_hashes, skip_reason
            )

        upgrade_allowed = self._is_upgrade_allowed(req)
        abstract_dist = self.preparer.prepare_linked_requirement(
            req, self.session, self.finder, upgrade_allowed,
            self.require_hashes
        )

        # NOTE
        # The following portion is for determining if a certain package is
        # going to be re-installed/upgraded or not and reporting to the user.
        # This should probably get cleaned up in a future refactor.

        # req.req is only available after unpack for URL
        # pkgs repeat check_if_exists to uninstall-on-upgrade
        # (#14)
        if not self.ignore_installed:
            req.check_if_exists(self.use_user_site)

        if req.satisfied_by:
            should_modify = (
                self.upgrade_strategy != "to-satisfy-only" or
                self.force_reinstall or
                self.ignore_installed or
                req.link.scheme == 'file'
            )
            if should_modify:
                self._set_req_to_reinstall(req)
            else:
                logger.info(
                    'Requirement already satisfied (use --upgrade to upgrade):'
                    ' %s', req,
                )

        return abstract_dist

    def _resolve_one(
        self,
        requirement_set,  # type: RequirementSet
        req_to_install  # type: InstallRequirement
    ):
        # type: (...) -> List[InstallRequirement]
        """Prepare a single requirements file.

        :return: A list of additional InstallRequirements to also install.
        """
        # Tell user what we are doing for this requirement:
        # obtain (editable), skipping, processing (local url), collecting
        # (remote url or package name)
        if req_to_install.constraint or req_to_install.prepared:
            return []

        req_to_install.prepared = True

        # register tmp src for cleanup in case something goes wrong
        requirement_set.reqs_to_cleanup.append(req_to_install)

        abstract_dist = self._get_abstract_dist_for(req_to_install)

        # Parse and return dependencies
        dist = abstract_dist.dist()
        try:
            check_dist_requires_python(dist)
        except UnsupportedPythonVersion as err:
            if self.ignore_requires_python:
                logger.warning(err.args[0])
            else:
                raise

        more_reqs = []  # type: List[InstallRequirement]

        def add_req(subreq, extras_requested):
            sub_install_req = install_req_from_req_string(
                str(subreq),
                req_to_install,
                isolated=self.isolated,
                wheel_cache=self.wheel_cache,
                use_pep517=self.use_pep517
            )
            parent_req_name = req_to_install.name
            to_scan_again, add_to_parent = requirement_set.add_requirement(
                sub_install_req,
                parent_req_name=parent_req_name,
                extras_requested=extras_requested,
            )
            if parent_req_name and add_to_parent:
                self._discovered_dependencies[parent_req_name].append(
                    add_to_parent
                )
            more_reqs.extend(to_scan_again)

        with indent_log():
            # We add req_to_install before its dependencies, so that we
            # can refer to it when adding dependencies.
            if not requirement_set.has_requirement(req_to_install.name):
                # 'unnamed' requirements will get added here
                req_to_install.is_direct = True
                requirement_set.add_requirement(
                    req_to_install, parent_req_name=None,
                )

            if not self.ignore_dependencies:
                if req_to_install.extras:
                    logger.debug(
                        "Installing extra requirements: %r",
                        ','.join(req_to_install.extras),
                    )
                missing_requested = sorted(
                    set(req_to_install.extras) - set(dist.extras)
                )
                for missing in missing_requested:
                    logger.warning(
                        '%s does not provide the extra \'%s\'',
                        dist, missing
                    )

                available_requested = sorted(
                    set(dist.extras) & set(req_to_install.extras)
                )
                for subreq in dist.requires(available_requested):
                    add_req(subreq, extras_requested=available_requested)

            if not req_to_install.editable and not req_to_install.satisfied_by:
                # XXX: --no-install leads this to report 'Successfully
                # downloaded' for only non-editable reqs, even though we took
                # action on them.
                requirement_set.successfully_downloaded.append(req_to_install)

        return more_reqs

    def get_installation_order(self, req_set):
        # type: (RequirementSet) -> List[InstallRequirement]
        """Create the installation order.

        The installation order is topological - requirements are installed
        before the requiring thing. We break cycles at an arbitrary point,
        and make no other guarantees.
        """
        # The current implementation, which we may change at any point
        # installs the user specified things in the order given, except when
        # dependencies must come earlier to achieve topological order.
        order = []
        ordered_reqs = set()  # type: Set[InstallRequirement]

        def schedule(req):
            if req.satisfied_by or req in ordered_reqs:
                return
            if req.constraint:
                return
            ordered_reqs.add(req)
            for dep in self._discovered_dependencies[req.name]:
                schedule(dep)
            order.append(req)

        for install_req in req_set.requirements.values():
            schedule(install_req)
        return order
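A self-contained sketch of the ordering scheme in get_installation_order() above: a post-order depth-first walk over the discovered dependencies yields each dependency before the package that requires it. The package names are invented for the demo.

deps = {
    'myapp': ['requests'],
    'requests': ['urllib3', 'idna'],
    'urllib3': [],
    'idna': [],
}

def installation_order(roots):
    order, seen = [], set()

    def schedule(name):
        if name in seen:  # also the cycle-breaker: revisits are no-ops
            return
        seen.add(name)
        for dep in deps.get(name, []):
            schedule(dep)
        order.append(name)  # appended only after its dependencies

    for root in roots:
        schedule(root)
    return order

print(installation_order(['myapp']))
# -> ['urllib3', 'idna', 'requests', 'myapp']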
@ -1,270 +0,0 @@
|
|||||||
"""
|
|
||||||
This code was taken from https://github.com/ActiveState/appdirs and modified
|
|
||||||
to suit our purposes.
|
|
||||||
"""
|
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from pip._vendor.six import PY2, text_type
|
|
||||||
|
|
||||||
from pip._internal.utils.compat import WINDOWS, expanduser
|
|
||||||
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
|
|
||||||
|
|
||||||
if MYPY_CHECK_RUNNING:
|
|
||||||
from typing import ( # noqa: F401
|
|
||||||
List, Union
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def user_cache_dir(appname):
|
|
||||||
# type: (str) -> str
|
|
||||||
r"""
|
|
||||||
Return full path to the user-specific cache dir for this application.
|
|
||||||
|
|
||||||
"appname" is the name of application.
|
|
||||||
|
|
||||||
Typical user cache directories are:
|
|
||||||
macOS: ~/Library/Caches/<AppName>
|
|
||||||
Unix: ~/.cache/<AppName> (XDG default)
|
|
||||||
Windows: C:\Users\<username>\AppData\Local\<AppName>\Cache
|
|
||||||
|
|
||||||
On Windows the only suggestion in the MSDN docs is that local settings go
|
|
||||||
in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the
|
|
||||||
non-roaming app data dir (the default returned by `user_data_dir`). Apps
|
|
||||||
typically put cache data somewhere *under* the given dir here. Some
|
|
||||||
examples:
|
|
||||||
...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
|
|
||||||
...\Acme\SuperApp\Cache\1.0
|
|
||||||
|
|
||||||
OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
|
|
||||||
"""
|
|
||||||
if WINDOWS:
|
|
||||||
# Get the base path
|
|
||||||
path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
|
|
||||||
|
|
||||||
# When using Python 2, return paths as bytes on Windows like we do on
|
|
||||||
# other operating systems. See helper function docs for more details.
|
|
||||||
if PY2 and isinstance(path, text_type):
|
|
||||||
path = _win_path_to_bytes(path)
|
|
||||||
|
|
||||||
# Add our app name and Cache directory to it
|
|
||||||
path = os.path.join(path, appname, "Cache")
|
|
||||||
elif sys.platform == "darwin":
|
|
||||||
# Get the base path
|
|
||||||
path = expanduser("~/Library/Caches")
|
|
||||||
|
|
||||||
# Add our app name to it
|
|
||||||
path = os.path.join(path, appname)
|
|
||||||
else:
|
|
||||||
# Get the base path
|
|
||||||
path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache"))
|
|
||||||
|
|
||||||
# Add our app name to it
|
|
||||||
path = os.path.join(path, appname)
|
|
||||||
|
|
||||||
return path
|
|
||||||
|
|
||||||
|
|
||||||
def user_data_dir(appname, roaming=False):
    # type: (str, bool) -> str
    r"""
    Return full path to the user-specific data dir for this application.

    "appname" is the name of the application.
        If None, just the system directory is returned.
    "roaming" (boolean, default False) can be set True to use the Windows
        roaming appdata directory. That means that for users on a Windows
        network setup for roaming profiles, this user data will be
        sync'd on login. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user data directories are:
        macOS:                  ~/Library/Application Support/<AppName>
                                if it exists, else ~/.config/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in
                                $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\ ...
                                ...Application Data\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local ...
                                ...Settings\Application Data\<AppName>
        Win 7 (not roaming):    C:\\Users\<username>\AppData\Local\<AppName>
        Win 7 (roaming):        C:\\Users\<username>\AppData\Roaming\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    if WINDOWS:
        const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
        path = os.path.join(os.path.normpath(_get_win_folder(const)), appname)
    elif sys.platform == "darwin":
        data_dir = os.path.join(
            expanduser('~/Library/Application Support/'),
            appname,
        )
        # Same logic as the original inline conditional, written out for
        # readability: fall back to ~/.config/<appname> when the Application
        # Support directory for the app does not exist.
        path = data_dir if os.path.isdir(data_dir) else os.path.join(
            expanduser('~/.config/'),
            appname,
        )
    else:
        path = os.path.join(
            os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")),
            appname,
        )

    return path
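# Editor's note: a minimal usage sketch of the helpers above, not part of the
# original file. "SuperApp" is a hypothetical app name; the expected outputs
# assume a POSIX host with no $XDG_* overrides set.
#
#     print(user_data_dir("SuperApp"))   # e.g. /home/alice/.local/share/SuperApp
#     print(user_cache_dir("SuperApp"))  # e.g. /home/alice/.cache/SuperApp
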
def user_config_dir(appname, roaming=True):
    # type: (str, bool) -> str
    """Return full path to the user-specific config dir for this application.

    "appname" is the name of the application.
        If None, just the system directory is returned.
    "roaming" (boolean, default True) can be set False to not use the
        Windows roaming appdata directory. That means that for users on a
        Windows network setup for roaming profiles, this user data will be
        sync'd on login. See
        <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
        for a discussion of issues.

    Typical user config directories are:
        macOS:  same as user_data_dir
        Unix:   ~/.config/<AppName>
        Win *:  same as user_data_dir

    For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
    """
    if WINDOWS:
        path = user_data_dir(appname, roaming=roaming)
    elif sys.platform == "darwin":
        path = user_data_dir(appname)
    else:
        path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config"))
        path = os.path.join(path, appname)

    return path

# for the discussion regarding site_config_dirs locations
# see <https://github.com/pypa/pip/issues/1733>
def site_config_dirs(appname):
    # type: (str) -> List[str]
    r"""Return a list of potential user-shared config dirs for this application.

    "appname" is the name of the application.

    Typical user config directories are:
        macOS:      /Library/Application Support/<AppName>/
        Unix:       /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in
                    $XDG_CONFIG_DIRS
        Win XP:     C:\Documents and Settings\All Users\Application ...
                    ...Data\<AppName>\
        Vista:      (Fail! "C:\ProgramData" is a hidden *system* directory
                    on Vista.)
        Win 7:      Hidden, but writeable on Win 7:
                    C:\ProgramData\<AppName>\
    """
    if WINDOWS:
        path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
        pathlist = [os.path.join(path, appname)]
    elif sys.platform == 'darwin':
        pathlist = [os.path.join('/Library/Application Support', appname)]
    else:
        # try looking in $XDG_CONFIG_DIRS
        xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
        if xdg_config_dirs:
            pathlist = [
                os.path.join(expanduser(x), appname)
                for x in xdg_config_dirs.split(os.pathsep)
            ]
        else:
            pathlist = []

        # always look in /etc directly as well
        pathlist.append('/etc')

    return pathlist

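# Editor's note: a hedged sketch, not part of the original file, showing how
# these helpers compose into a config search order (system-wide locations
# first, then per-user), in the spirit of how pip locates its pip.conf files.
def config_search_order(appname):
    return site_config_dirs(appname) + [user_config_dir(appname)]
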
# -- Windows support functions --

def _get_win_folder_from_registry(csidl_name):
    # type: (str) -> str
    """
    This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    import _winreg

    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
    }[csidl_name]

    key = _winreg.OpenKey(
        _winreg.HKEY_CURRENT_USER,
        r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
    )
    directory, _type = _winreg.QueryValueEx(key, shell_folder_name)
    return directory


def _get_win_folder_with_ctypes(csidl_name):
    # type: (str) -> str
    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
    }[csidl_name]

    buf = ctypes.create_unicode_buffer(1024)
    ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if it has highbit chars. See
    # <http://bugs.activestate.com/show_bug.cgi?id=85099>.
    has_high_char = False
    for c in buf:
        if ord(c) > 255:
            has_high_char = True
            break
    if has_high_char:
        buf2 = ctypes.create_unicode_buffer(1024)
        if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value


if WINDOWS:
    try:
        import ctypes
        _get_win_folder = _get_win_folder_with_ctypes
    except ImportError:
        _get_win_folder = _get_win_folder_from_registry


def _win_path_to_bytes(path):
    """Encode Windows paths to bytes. Only used on Python 2.

    Motivation is to be consistent with other operating systems where paths
    are also returned as bytes. This avoids problems mixing bytes and Unicode
    elsewhere in the codebase. For more details and discussion see
    <https://github.com/pypa/pip/issues/3463>.

    If encoding using ASCII and MBCS fails, return the original Unicode path.
    """
    for encoding in ('ASCII', 'MBCS'):
        try:
            return path.encode(encoding)
        except (UnicodeEncodeError, LookupError):
            pass
    return path
@ -1,264 +0,0 @@
"""Stuff that differs in different Python versions and platform
distributions."""
from __future__ import absolute_import, division

import codecs
import locale
import logging
import os
import shutil
import sys

from pip._vendor.six import text_type

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Tuple, Text  # noqa: F401

try:
    import ipaddress
except ImportError:
    try:
        from pip._vendor import ipaddress  # type: ignore
    except ImportError:
        import ipaddr as ipaddress  # type: ignore
        ipaddress.ip_address = ipaddress.IPAddress  # type: ignore
        ipaddress.ip_network = ipaddress.IPNetwork  # type: ignore


__all__ = [
    "ipaddress", "uses_pycache", "console_to_str", "native_str",
    "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", "get_terminal_size",
    "get_extension_suffixes",
]


logger = logging.getLogger(__name__)

if sys.version_info >= (3, 4):
    uses_pycache = True
    from importlib.util import cache_from_source
else:
    import imp

    try:
        cache_from_source = imp.cache_from_source  # type: ignore
    except AttributeError:
        # does not use __pycache__
        cache_from_source = None

    uses_pycache = cache_from_source is not None


if sys.version_info >= (3, 5):
    backslashreplace_decode = "backslashreplace"
else:
    # In version 3.4 and older, backslashreplace exists
    # but does not support use for decoding.
    # We implement our own replace handler for this
    # situation, so that we can consistently use
    # backslash replacement for all versions.
    def backslashreplace_decode_fn(err):
        raw_bytes = (err.object[i] for i in range(err.start, err.end))
        if sys.version_info[0] == 2:
            # Python 2 gave us characters - convert to numeric bytes
            raw_bytes = (ord(b) for b in raw_bytes)
        return u"".join(u"\\x%x" % c for c in raw_bytes), err.end
    codecs.register_error(
        "backslashreplace_decode",
        backslashreplace_decode_fn,
    )
    backslashreplace_decode = "backslashreplace_decode"

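# Editor's note: a small demonstration, not part of the original file. The
# shim above makes this spelling work on every supported interpreter; on
# Python 3.5+ it is simply the built-in "backslashreplace" handler.
assert b"caf\xe9".decode("ascii", errors=backslashreplace_decode) == "caf\\xe9"
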
def console_to_str(data):
    # type: (bytes) -> Text
    """Return a string, safe for output, of subprocess output.

    We assume the data is in the locale preferred encoding.
    If it won't decode properly, we warn the user but decode as
    best we can.

    We also ensure that the output can be safely written to
    standard output without encoding errors.
    """

    # First, get the encoding we assume. This is the preferred
    # encoding for the locale, unless that is not found, or
    # it is ASCII, in which case assume UTF-8
    encoding = locale.getpreferredencoding()
    if (not encoding) or codecs.lookup(encoding).name == "ascii":
        encoding = "utf-8"

    # Now try to decode the data - if we fail, warn the user and
    # decode with replacement.
    try:
        decoded_data = data.decode(encoding)
    except UnicodeDecodeError:
        logger.warning(
            "Subprocess output does not appear to be encoded as %s",
            encoding,
        )
        decoded_data = data.decode(encoding, errors=backslashreplace_decode)

    # Make sure we can print the output, by encoding it to the output
    # encoding with replacement of unencodable characters, and then
    # decoding again.
    # We use stderr's encoding because it's less likely to be
    # redirected and if we don't find an encoding we skip this
    # step (on the assumption that output is wrapped by something
    # that won't fail).
    # The double getattr is to deal with the possibility that we're
    # being called in a situation where sys.__stderr__ doesn't exist,
    # or doesn't have an encoding attribute. Neither of these cases
    # should occur in normal pip use, but there's no harm in checking
    # in case people use pip in (unsupported) unusual situations.
    output_encoding = getattr(getattr(sys, "__stderr__", None),
                              "encoding", None)

    if output_encoding:
        output_encoded = decoded_data.encode(
            output_encoding,
            errors="backslashreplace"
        )
        decoded_data = output_encoded.decode(output_encoding)

    return decoded_data

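# Editor's note: a usage sketch, not part of the original file. console_to_str
# decodes raw subprocess output using the locale's preferred encoding, with a
# lossless backslash-escaped fallback instead of a crash.
import subprocess
raw = subprocess.check_output([sys.executable, "-c", "print('hello')"])
print(console_to_str(raw))  # -> hello
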
if sys.version_info >= (3,):
    def native_str(s, replace=False):
        # type: (str, bool) -> str
        if isinstance(s, bytes):
            return s.decode('utf-8', 'replace' if replace else 'strict')
        return s

else:
    def native_str(s, replace=False):
        # type: (str, bool) -> str
        # Replace is ignored -- unicode to UTF-8 can't fail
        if isinstance(s, text_type):
            return s.encode('utf-8')
        return s


def get_path_uid(path):
    # type: (str) -> int
    """
    Return path's uid.

    Does not follow symlinks:
        https://github.com/pypa/pip/pull/935#discussion_r5307003

    Placed this function in compat due to differences on AIX and
    Jython, which should eventually go away.

    :raises OSError: When path is a symlink or can't be read.
    """
    if hasattr(os, 'O_NOFOLLOW'):
        fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
        file_uid = os.fstat(fd).st_uid
        os.close(fd)
    else:  # AIX and Jython
        # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
        if not os.path.islink(path):
            # older versions of Jython don't have `os.fstat`
            file_uid = os.stat(path).st_uid
        else:
            # raise OSError for parity with os.O_NOFOLLOW above
            raise OSError(
                "%s is a symlink; Will not return uid for symlinks" % path
            )
    return file_uid


if sys.version_info >= (3, 4):
    from importlib.machinery import EXTENSION_SUFFIXES

    def get_extension_suffixes():
        return EXTENSION_SUFFIXES
else:
    from imp import get_suffixes

    def get_extension_suffixes():
        return [suffix[0] for suffix in get_suffixes()]


def expanduser(path):
    # type: (str) -> str
    """
    Expand ~ and ~user constructions.

    Includes a workaround for https://bugs.python.org/issue14768
    """
    expanded = os.path.expanduser(path)
    if path.startswith('~/') and expanded.startswith('//'):
        expanded = expanded[1:]
    return expanded


# packages in the stdlib that may have installation metadata, but should not be
# considered 'installed'. this theoretically could be determined based on
# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
# make this ineffective, so hard-coding
stdlib_pkgs = {"python", "wsgiref", "argparse"}


# windows detection, covers cpython and ironpython
WINDOWS = (sys.platform.startswith("win") or
           (sys.platform == 'cli' and os.name == 'nt'))


def samefile(file1, file2):
    # type: (str, str) -> bool
    """Provide an alternative for os.path.samefile on Windows/Python2"""
    if hasattr(os.path, 'samefile'):
        return os.path.samefile(file1, file2)
    else:
        path1 = os.path.normcase(os.path.abspath(file1))
        path2 = os.path.normcase(os.path.abspath(file2))
        return path1 == path2


if hasattr(shutil, 'get_terminal_size'):
    def get_terminal_size():
        # type: () -> Tuple[int, int]
        """
        Returns a tuple (x, y) representing the width(x) and the height(y)
        in characters of the terminal window.
        """
        return tuple(shutil.get_terminal_size())  # type: ignore
else:
    def get_terminal_size():
        # type: () -> Tuple[int, int]
        """
        Returns a tuple (x, y) representing the width(x) and the height(y)
        in characters of the terminal window.
        """
        def ioctl_GWINSZ(fd):
            try:
                import fcntl
                import termios
                import struct
                cr = struct.unpack_from(
                    'hh',
                    fcntl.ioctl(fd, termios.TIOCGWINSZ, '12345678')
                )
            except Exception:
                return None
            if cr == (0, 0):
                return None
            return cr
        cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
        if not cr:
            try:
                fd = os.open(os.ctermid(), os.O_RDONLY)
                cr = ioctl_GWINSZ(fd)
                os.close(fd)
            except Exception:
                pass
        if not cr:
            cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
        return int(cr[1]), int(cr[0])
@ -1,90 +0,0 @@
"""
A module that implements tooling to enable easy warnings about deprecations.
"""
from __future__ import absolute_import

import logging
import warnings

from pip._vendor.packaging.version import parse

from pip import __version__ as current_version
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Any, Optional  # noqa: F401


class PipDeprecationWarning(Warning):
    pass


_original_showwarning = None  # type: Any


# Warnings <-> Logging Integration
def _showwarning(message, category, filename, lineno, file=None, line=None):
    if file is not None:
        if _original_showwarning is not None:
            _original_showwarning(
                message, category, filename, lineno, file, line,
            )
    elif issubclass(category, PipDeprecationWarning):
        # We use a specially named logger which will handle all of the
        # deprecation messages for pip.
        logger = logging.getLogger("pip._internal.deprecations")
        logger.warning(message)
    else:
        _original_showwarning(
            message, category, filename, lineno, file, line,
        )


def install_warning_logger():
    # type: () -> None
    # Enable our Deprecation Warnings
    warnings.simplefilter("default", PipDeprecationWarning, append=True)

    global _original_showwarning

    if _original_showwarning is None:
        _original_showwarning = warnings.showwarning
        warnings.showwarning = _showwarning


def deprecated(reason, replacement, gone_in, issue=None):
    # type: (str, Optional[str], Optional[str], Optional[int]) -> None
    """Helper to deprecate existing functionality.

    reason:
        Textual reason shown to the user about why this functionality has
        been deprecated.
    replacement:
        Textual suggestion shown to the user about what alternative
        functionality they can use.
    gone_in:
        The version of pip in which this functionality should be removed.
        Raises an error if pip's current version is greater than or equal
        to this.
    issue:
        Issue number on the tracker that would serve as a useful place for
        users to find related discussion and provide feedback.

    Always pass replacement, gone_in and issue as keyword arguments for clarity
    at the call site.
    """

    # Construct a nice message.
    # This is purposely eagerly formatted as we want it to appear as if someone
    # typed this entire message out.
    message = "DEPRECATION: " + reason
    if replacement is not None:
        message += " A possible replacement is {}.".format(replacement)
    if issue is not None:
        url = "https://github.com/pypa/pip/issues/" + str(issue)
        message += " You can find discussion regarding this at {}.".format(url)

    # Raise as an error if it has to be removed.
    if gone_in is not None and parse(current_version) >= parse(gone_in):
        raise PipDeprecationWarning(message)
    warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
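# Editor's note: a usage sketch, not part of the original file; the flag,
# version, and issue number are hypothetical. This emits a
# PipDeprecationWarning, or raises once pip's version reaches gone_in.
deprecated(
    "Support for the --frobnicate flag is deprecated.",
    replacement="use --no-frobnicate",
    gone_in="19.3",
    issue=9999,
)
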
@ -1,39 +0,0 @@
import codecs
import locale
import re
import sys

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List, Tuple, Text  # noqa: F401

BOMS = [
    (codecs.BOM_UTF8, 'utf8'),
    (codecs.BOM_UTF16, 'utf16'),
    (codecs.BOM_UTF16_BE, 'utf16-be'),
    (codecs.BOM_UTF16_LE, 'utf16-le'),
    (codecs.BOM_UTF32, 'utf32'),
    (codecs.BOM_UTF32_BE, 'utf32-be'),
    (codecs.BOM_UTF32_LE, 'utf32-le'),
]  # type: List[Tuple[bytes, Text]]

ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')


def auto_decode(data):
    # type: (bytes) -> Text
    """Check a bytes string for a BOM to correctly detect the encoding.

    Falls back to locale.getpreferredencoding(False), like open() does on
    Python 3."""
    for bom, encoding in BOMS:
        if data.startswith(bom):
            return data[len(bom):].decode(encoding)
    # Let's check the first two lines as in PEP 263
    for line in data.split(b'\n')[:2]:
        if line[0:1] == b'#' and ENCODING_RE.search(line):
            encoding = ENCODING_RE.search(line).groups()[0].decode('ascii')
            return data.decode(encoding)
    return data.decode(
        locale.getpreferredencoding(False) or sys.getdefaultencoding(),
    )
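# Editor's note: two quick checks of auto_decode, not part of the original
# file: a byte-order mark wins first, then a PEP 263 coding comment in the
# first two lines.
assert auto_decode(codecs.BOM_UTF8 + b"hi") == "hi"
assert "caf\xe9" in auto_decode(b"# coding: latin-1\nname = 'caf\xe9'\n")
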
@ -1,30 +0,0 @@
import os
import os.path

from pip._internal.utils.compat import get_path_uid


def check_path_owner(path):
    # type: (str) -> bool
    # If we don't have a way to check the effective uid of this process, then
    # we'll just assume that we own the directory.
    if not hasattr(os, "geteuid"):
        return True

    previous = None
    while path != previous:
        if os.path.lexists(path):
            # Check if path is writable by current user.
            if os.geteuid() == 0:
                # Special handling for root user in order to handle properly
                # cases where users use sudo without -H flag.
                try:
                    path_uid = get_path_uid(path)
                except OSError:
                    return False
                return path_uid == 0
            else:
                return os.access(path, os.W_OK)
        else:
            previous, path = path, os.path.dirname(path)
    return False  # assume we don't own the path
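# Editor's note: a usage sketch, not part of the original file; the path is
# hypothetical. Because the loop above walks up to the nearest existing
# ancestor, this answers sensibly even before the directory exists.
if check_path_owner(os.path.expanduser("~/.cache/pip/http")):
    print("safe to write cache entries here")
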
@ -1,93 +0,0 @@
from __future__ import absolute_import

import ctypes
import re
import warnings

from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional, Tuple  # noqa: F401


def glibc_version_string():
    # type: () -> Optional[str]
    "Returns glibc version string, or None if not using glibc."

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    process_namespace = ctypes.CDLL(None)
    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str


# Separated out from have_compatible_glibc for easier unit testing
def check_glibc_version(version_str, required_major, minimum_minor):
    # type: (str, int, int) -> bool
    # Parse string and check against requested version.
    #
    # We use a regexp instead of str.split because we want to discard any
    # random junk that might come after the minor version -- this might happen
    # in patched/forked versions of glibc (e.g. Linaro's version of glibc
    # uses version strings like "2.20-2014.11"). See gh-3588.
    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if not m:
        warnings.warn("Expected glibc version with 2 components major.minor,"
                      " got: %s" % version_str, RuntimeWarning)
        return False
    return (int(m.group("major")) == required_major and
            int(m.group("minor")) >= minimum_minor)


def have_compatible_glibc(required_major, minimum_minor):
    # type: (int, int) -> bool
    version_str = glibc_version_string()  # type: Optional[str]
    if version_str is None:
        return False
    return check_glibc_version(version_str, required_major, minimum_minor)

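# Editor's note: quick checks of the parser above, not part of the original
# file. The vendor-suffixed string mirrors the Linaro example from gh-3588.
assert check_glibc_version("2.20-2014.11", 2, 17) is True
assert check_glibc_version("2.12", 2, 17) is False
assert check_glibc_version("garbage", 2, 17) is False  # also warns
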
# platform.libc_ver regularly returns completely nonsensical glibc
# versions. E.g. on my computer, platform says:
#
#   ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
#   ('glibc', '2.7')
#   ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
#   ('glibc', '2.9')
#
# But the truth is:
#
#   ~$ ldd --version
#   ldd (Debian GLIBC 2.22-11) 2.22
#
# This is unfortunate, because it means that the linehaul data on libc
# versions that was generated by pip 8.1.2 and earlier is useless and
# misleading. Solution: instead of using platform, use our code that actually
# works.
def libc_ver():
    # type: () -> Tuple[str, str]
    """Try to determine the glibc version

    Returns a tuple of strings (lib, version) which default to empty strings
    in case the lookup fails.
    """
    glibc_version = glibc_version_string()
    if glibc_version is None:
        return ("", "")
    else:
        return ("glibc", glibc_version)
@ -1,115 +0,0 @@
from __future__ import absolute_import

import hashlib

from pip._vendor.six import iteritems, iterkeys, itervalues

from pip._internal.exceptions import (
    HashMismatch, HashMissing, InstallationError,
)
from pip._internal.utils.misc import read_chunks
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (  # noqa: F401
        Dict, List, BinaryIO, NoReturn, Iterator
    )
    from pip._vendor.six import PY3
    if PY3:
        from hashlib import _Hash  # noqa: F401
    else:
        from hashlib import _hash as _Hash  # noqa: F401


# The recommended hash algo of the moment. Change this whenever the state of
# the art changes; it won't hurt backward compatibility.
FAVORITE_HASH = 'sha256'


# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
# Currently, those are the ones at least as collision-resistant as sha256.
STRONG_HASHES = ['sha256', 'sha384', 'sha512']


class Hashes(object):
    """A wrapper that builds multiple hashes at once and checks them against
    known-good values

    """
    def __init__(self, hashes=None):
        # type: (Dict[str, List[str]]) -> None
        """
        :param hashes: A dict of algorithm names pointing to lists of allowed
            hex digests
        """
        self._allowed = {} if hashes is None else hashes

    def check_against_chunks(self, chunks):
        # type: (Iterator[bytes]) -> None
        """Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.

        """
        gots = {}
        for hash_name in iterkeys(self._allowed):
            try:
                gots[hash_name] = hashlib.new(hash_name)
            except (ValueError, TypeError):
                raise InstallationError('Unknown hash name: %s' % hash_name)

        for chunk in chunks:
            for hash in itervalues(gots):
                hash.update(chunk)

        for hash_name, got in iteritems(gots):
            if got.hexdigest() in self._allowed[hash_name]:
                return
        self._raise(gots)

    def _raise(self, gots):
        # type: (Dict[str, _Hash]) -> NoReturn
        raise HashMismatch(self._allowed, gots)

    def check_against_file(self, file):
        # type: (BinaryIO) -> None
        """Check good hashes against a file-like object

        Raise HashMismatch if none match.

        """
        return self.check_against_chunks(read_chunks(file))

    def check_against_path(self, path):
        # type: (str) -> None
        with open(path, 'rb') as file:
            return self.check_against_file(file)

    def __nonzero__(self):
        # type: () -> bool
        """Return whether I know any known-good hashes."""
        return bool(self._allowed)

    def __bool__(self):
        # type: () -> bool
        return self.__nonzero__()


class MissingHashes(Hashes):
    """A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    """
    def __init__(self):
        # type: () -> None
        """Don't offer the ``hashes`` kwarg."""
        # Pass our favorite hash in to generate a "gotten hash". With the
        # empty list, it will never match, so an error will always raise.
        super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []})

    def _raise(self, gots):
        # type: (Dict[str, _Hash]) -> NoReturn
        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
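# Editor's note: a usage sketch, not part of the original file. A Hashes
# instance accepts data whose digest matches any allowed value and raises
# HashMismatch otherwise.
good = hashlib.sha256(b"payload").hexdigest()
Hashes({"sha256": [good]}).check_against_chunks(iter([b"payload"]))  # passes
# feeding different bytes would raise HashMismatch instead
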
@ -1,318 +0,0 @@
from __future__ import absolute_import

import contextlib
import errno
import logging
import logging.config  # needed by setup_logging's dictConfig call below
import logging.handlers
import os
import sys

from pip._vendor.six import PY2

from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.misc import ensure_dir

try:
    import threading
except ImportError:
    import dummy_threading as threading  # type: ignore


try:
    from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
    colorama = None


_log_state = threading.local()
_log_state.indentation = 0


class BrokenStdoutLoggingError(Exception):
    """
    Raised if BrokenPipeError occurs for the stdout stream while logging.
    """
    pass


# BrokenPipeError does not exist in Python 2 and, in addition, manifests
# differently in Windows and non-Windows.
if WINDOWS:
    # In Windows, a broken pipe can show up as EINVAL rather than EPIPE:
    # https://bugs.python.org/issue19612
    # https://bugs.python.org/issue30418
    if PY2:
        def _is_broken_pipe_error(exc_class, exc):
            """See the docstring for non-Windows Python 3 below."""
            return (exc_class is IOError and
                    exc.errno in (errno.EINVAL, errno.EPIPE))
    else:
        # In Windows, a broken pipe IOError became OSError in Python 3.
        def _is_broken_pipe_error(exc_class, exc):
            """See the docstring for non-Windows Python 3 below."""
            return ((exc_class is BrokenPipeError) or  # noqa: F821
                    (exc_class is OSError and
                     exc.errno in (errno.EINVAL, errno.EPIPE)))
elif PY2:
    def _is_broken_pipe_error(exc_class, exc):
        """See the docstring for non-Windows Python 3 below."""
        return (exc_class is IOError and exc.errno == errno.EPIPE)
else:
    # Then we are in the non-Windows Python 3 case.
    def _is_broken_pipe_error(exc_class, exc):
        """
        Return whether an exception is a broken pipe error.

        Args:
          exc_class: an exception class.
          exc: an exception instance.
        """
        return (exc_class is BrokenPipeError)  # noqa: F821


@contextlib.contextmanager
def indent_log(num=2):
    """
    A context manager which will cause the log output to be indented for any
    log messages emitted inside it.
    """
    _log_state.indentation += num
    try:
        yield
    finally:
        _log_state.indentation -= num


def get_indentation():
    return getattr(_log_state, 'indentation', 0)

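# Editor's note: a usage sketch, not part of the original file; the logger
# name is hypothetical. Once the IndentingFormatter defined below is installed
# on a handler, each nested indent_log() context shifts records two more
# spaces.
example_logger = logging.getLogger("pip._internal.example")
with indent_log():
    example_logger.info("collecting requirements")      # indented 2 spaces
    with indent_log():
        example_logger.info("resolving dependencies")   # indented 4 spaces
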
class IndentingFormatter(logging.Formatter):
    def __init__(self, *args, **kwargs):
        """
        A logging.Formatter that obeys indent_log() contexts.

        :param add_timestamp: A bool indicating output lines should be prefixed
            with their record's timestamp.
        """
        self.add_timestamp = kwargs.pop("add_timestamp", False)
        super(IndentingFormatter, self).__init__(*args, **kwargs)

    def format(self, record):
        """
        Calls the standard formatter, but will indent all of the log messages
        by our current indentation level.
        """
        formatted = super(IndentingFormatter, self).format(record)
        prefix = ''
        if self.add_timestamp:
            prefix = self.formatTime(record, "%Y-%m-%dT%H:%M:%S ")
        prefix += " " * get_indentation()
        formatted = "".join([
            prefix + line
            for line in formatted.splitlines(True)
        ])
        return formatted


def _color_wrap(*colors):
    def wrapped(inp):
        return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
    return wrapped


class ColorizedStreamHandler(logging.StreamHandler):

    # Don't build up a list of colors if we don't have colorama
    if colorama:
        COLORS = [
            # This needs to be in order from highest logging level to lowest.
            (logging.ERROR, _color_wrap(colorama.Fore.RED)),
            (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)),
        ]
    else:
        COLORS = []

    def __init__(self, stream=None, no_color=None):
        logging.StreamHandler.__init__(self, stream)
        self._no_color = no_color

        if WINDOWS and colorama:
            self.stream = colorama.AnsiToWin32(self.stream)

    def _using_stdout(self):
        """
        Return whether the handler is using sys.stdout.
        """
        if WINDOWS and colorama:
            # Then self.stream is an AnsiToWin32 object.
            return self.stream.wrapped is sys.stdout

        return self.stream is sys.stdout

    def should_color(self):
        # Don't colorize things if we do not have colorama or if told not to
        if not colorama or self._no_color:
            return False

        real_stream = (
            self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
            else self.stream.wrapped
        )

        # If the stream is a tty we should color it
        if hasattr(real_stream, "isatty") and real_stream.isatty():
            return True

        # If we have an ANSI term we should color it
        if os.environ.get("TERM") == "ANSI":
            return True

        # If anything else we should not color it
        return False

    def format(self, record):
        msg = logging.StreamHandler.format(self, record)

        if self.should_color():
            for level, color in self.COLORS:
                if record.levelno >= level:
                    msg = color(msg)
                    break

        return msg

    # The logging module says handleError() can be customized.
    def handleError(self, record):
        exc_class, exc = sys.exc_info()[:2]
        # If a broken pipe occurred while calling write() or flush() on the
        # stdout stream in logging's Handler.emit(), then raise our special
        # exception so we can handle it in main() instead of logging the
        # broken pipe error and continuing.
        if (exc_class and self._using_stdout() and
                _is_broken_pipe_error(exc_class, exc)):
            raise BrokenStdoutLoggingError()

        return super(ColorizedStreamHandler, self).handleError(record)


class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):

    def _open(self):
        ensure_dir(os.path.dirname(self.baseFilename))
        return logging.handlers.RotatingFileHandler._open(self)


class MaxLevelFilter(logging.Filter):

    def __init__(self, level):
        self.level = level

    def filter(self, record):
        return record.levelno < self.level


def setup_logging(verbosity, no_color, user_log_file):
    """Configures and sets up all of the logging.

    Returns the requested logging level, as its integer value.
    """

    # Determine the level to be logging at.
    if verbosity >= 1:
        level = "DEBUG"
    elif verbosity == -1:
        level = "WARNING"
    elif verbosity == -2:
        level = "ERROR"
    elif verbosity <= -3:
        level = "CRITICAL"
    else:
        level = "INFO"

    level_number = getattr(logging, level)

    # The "root" logger should match the "console" level *unless* we also need
    # to log to a user log file.
    include_user_log = user_log_file is not None
    if include_user_log:
        additional_log_file = user_log_file
        root_level = "DEBUG"
    else:
        additional_log_file = "/dev/null"
        root_level = level

    # Disable any logging besides WARNING unless we have DEBUG level logging
    # enabled for vendored libraries.
    vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"

    # Shorthands for clarity
    log_streams = {
        "stdout": "ext://sys.stdout",
        "stderr": "ext://sys.stderr",
    }
    handler_classes = {
        "stream": "pip._internal.utils.logging.ColorizedStreamHandler",
        "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
    }

    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "filters": {
            "exclude_warnings": {
                "()": "pip._internal.utils.logging.MaxLevelFilter",
                "level": logging.WARNING,
            },
        },
        "formatters": {
            "indent": {
                "()": IndentingFormatter,
                "format": "%(message)s",
            },
            "indent_with_timestamp": {
                "()": IndentingFormatter,
                "format": "%(message)s",
                "add_timestamp": True,
            },
        },
        "handlers": {
            "console": {
                "level": level,
                "class": handler_classes["stream"],
                "no_color": no_color,
                "stream": log_streams["stdout"],
                "filters": ["exclude_warnings"],
                "formatter": "indent",
            },
            "console_errors": {
                "level": "WARNING",
                "class": handler_classes["stream"],
                "no_color": no_color,
                "stream": log_streams["stderr"],
                "formatter": "indent",
            },
            "user_log": {
                "level": "DEBUG",
                "class": handler_classes["file"],
                "filename": additional_log_file,
                "delay": True,
                "formatter": "indent_with_timestamp",
            },
        },
        "root": {
            "level": root_level,
            "handlers": ["console", "console_errors"] + (
                ["user_log"] if include_user_log else []
            ),
        },
        "loggers": {
            "pip._vendor": {
                "level": vendored_log_level
            }
        },
    })

    return level_number
File diff suppressed because it is too large
@ -1,40 +0,0 @@
"""Utilities for defining models
"""

import operator


class KeyBasedCompareMixin(object):
    """Provides comparison capabilities based on a key
    """

    def __init__(self, key, defining_class):
        self._compare_key = key
        self._defining_class = defining_class

    def __hash__(self):
        return hash(self._compare_key)

    def __lt__(self, other):
        return self._compare(other, operator.__lt__)

    def __le__(self, other):
        return self._compare(other, operator.__le__)

    def __gt__(self, other):
        return self._compare(other, operator.__gt__)

    def __ge__(self, other):
        return self._compare(other, operator.__ge__)

    def __eq__(self, other):
        return self._compare(other, operator.__eq__)

    def __ne__(self, other):
        return self._compare(other, operator.__ne__)

    def _compare(self, other, method):
        if not isinstance(other, self._defining_class):
            return NotImplemented

        return method(self._compare_key, other._compare_key)
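# Editor's note: a minimal sketch of subclassing the mixin, not part of the
# original file; the Link class here is hypothetical. Instances hash and
# compare purely by the key passed to the mixin.
class Link(KeyBasedCompareMixin):
    def __init__(self, url):
        self.url = url
        super(Link, self).__init__(key=url, defining_class=Link)

assert Link("https://a.example") == Link("https://a.example")
assert Link("https://a.example") < Link("https://b.example")
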
@ -1,164 +0,0 @@
from __future__ import absolute_import

import datetime
import json
import logging
import os.path
import sys

from pip._vendor import lockfile, pkg_resources
from pip._vendor.packaging import version as packaging_version

from pip._internal.index import PackageFinder
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.misc import ensure_dir, get_installed_version
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    import optparse  # noqa: F401
    from typing import Any, Dict  # noqa: F401
    from pip._internal.download import PipSession  # noqa: F401


SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"


logger = logging.getLogger(__name__)


class SelfCheckState(object):
    def __init__(self, cache_dir):
        # type: (str) -> None
        self.state = {}  # type: Dict[str, Any]
        self.statefile_path = None

        # Try to load the existing state
        if cache_dir:
            self.statefile_path = os.path.join(cache_dir, "selfcheck.json")
            try:
                with open(self.statefile_path) as statefile:
                    self.state = json.load(statefile)[sys.prefix]
            except (IOError, ValueError, KeyError):
                # Explicitly suppressing exceptions, since we don't want to
                # error out if the cache file is invalid.
                pass

    def save(self, pypi_version, current_time):
        # type: (str, datetime.datetime) -> None
        # If we do not have a path to cache in, don't bother saving.
        if not self.statefile_path:
            return

        # Check to make sure that we own the directory
        if not check_path_owner(os.path.dirname(self.statefile_path)):
            return

        # Now that we've ensured the directory is owned by this user, we'll go
        # ahead and make sure that all our directories are created.
        ensure_dir(os.path.dirname(self.statefile_path))

        # Attempt to write out our version check file
        with lockfile.LockFile(self.statefile_path):
            if os.path.exists(self.statefile_path):
                with open(self.statefile_path) as statefile:
                    state = json.load(statefile)
            else:
                state = {}

            state[sys.prefix] = {
                "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                "pypi_version": pypi_version,
            }

            with open(self.statefile_path, "w") as statefile:
                json.dump(state, statefile, sort_keys=True,
                          separators=(",", ":"))


def was_installed_by_pip(pkg):
    # type: (str) -> bool
    """Checks whether pkg was installed by pip

    This is used to avoid displaying the upgrade message when pip was in
    fact installed by a system package manager, such as dnf on Fedora.
    """
    try:
        dist = pkg_resources.get_distribution(pkg)
        return (dist.has_metadata('INSTALLER') and
                'pip' in dist.get_metadata_lines('INSTALLER'))
    except pkg_resources.DistributionNotFound:
        return False


def pip_version_check(session, options):
    # type: (PipSession, optparse.Values) -> None
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    """
    installed_version = get_installed_version("pip")
    if not installed_version:
        return

    pip_version = packaging_version.parse(installed_version)
    pypi_version = None

    try:
        state = SelfCheckState(cache_dir=options.cache_dir)

        current_time = datetime.datetime.utcnow()
        # Determine if we need to refresh the state
        if "last_check" in state.state and "pypi_version" in state.state:
            last_check = datetime.datetime.strptime(
                state.state["last_check"],
                SELFCHECK_DATE_FMT
            )
            if (current_time - last_check).total_seconds() < 7 * 24 * 60 * 60:
                pypi_version = state.state["pypi_version"]

        # Refresh the version if we need to or just see if we need to warn
        if pypi_version is None:
            # Let's use PackageFinder to see what the latest pip version is
            finder = PackageFinder(
                find_links=options.find_links,
                index_urls=[options.index_url] + options.extra_index_urls,
                allow_all_prereleases=False,  # Explicitly set to False
                trusted_hosts=options.trusted_hosts,
                session=session,
            )
            all_candidates = finder.find_all_candidates("pip")
            if not all_candidates:
                return
            pypi_version = str(
                max(all_candidates, key=lambda c: c.version).version
            )

            # save that we've performed a check
            state.save(pypi_version, current_time)

        remote_version = packaging_version.parse(pypi_version)

        # Determine if our pypi_version is older
        if (pip_version < remote_version and
                pip_version.base_version != remote_version.base_version and
                was_installed_by_pip('pip')):
            # Advise "python -m pip" on Windows to avoid issues
            # with overwriting pip.exe.
            if WINDOWS:
                pip_cmd = "python -m pip"
            else:
                pip_cmd = "pip"
            logger.warning(
                "You are using pip version %s, however version %s is "
                "available.\nYou should consider upgrading via the "
                "'%s install --upgrade pip' command.",
                pip_version, pypi_version, pip_cmd
            )
    except Exception:
        logger.debug(
            "There was an error checking the latest version of pip",
            exc_info=True,
        )
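# Editor's note: a standalone restatement of the freshness rule used by
# pip_version_check above, not part of the original file: a cached
# pypi_version is reused only when the previous check is under a week old.
def _state_is_fresh(last_check_str, now):
    last_check = datetime.datetime.strptime(last_check_str, SELFCHECK_DATE_FMT)
    return (now - last_check).total_seconds() < 7 * 24 * 60 * 60
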
@ -1,85 +0,0 @@
from __future__ import absolute_import

import logging
import sys
from email.parser import FeedParser

from pip._vendor import pkg_resources
from pip._vendor.packaging import specifiers, version

from pip._internal import exceptions
from pip._internal.utils.misc import display_path
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Optional  # noqa: F401
    from email.message import Message  # noqa: F401
    from pip._vendor.pkg_resources import Distribution  # noqa: F401


logger = logging.getLogger(__name__)


def check_requires_python(requires_python):
    # type: (Optional[str]) -> bool
    """
    Check if the python version in use matches the `requires_python`
    specifier.

    Returns `True` if the version of python in use matches the requirement.
    Returns `False` if the version of python in use does not match the
    requirement.

    Raises an InvalidSpecifier if `requires_python` has an invalid format.
    """
    if requires_python is None:
        # The package provides no information
        return True
    requires_python_specifier = specifiers.SpecifierSet(requires_python)

    # We only use major.minor.micro
    python_version = version.parse('.'.join(map(str, sys.version_info[:3])))
    return python_version in requires_python_specifier

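# Editor's note: quick illustrations, not part of the original file; the
# results in the comments assume a current CPython 3.x interpreter.
check_requires_python(None)     # -> True (no restriction declared)
check_requires_python(">=2.7")  # -> True on any modern interpreter
check_requires_python(">=4.0")  # -> False today
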
def get_metadata(dist):
    # type: (Distribution) -> Message
    if (isinstance(dist, pkg_resources.DistInfoDistribution) and
            dist.has_metadata('METADATA')):
        metadata = dist.get_metadata('METADATA')
    elif dist.has_metadata('PKG-INFO'):
        metadata = dist.get_metadata('PKG-INFO')
    else:
        logger.warning("No metadata found in %s", display_path(dist.location))
        metadata = ''

    feed_parser = FeedParser()
    feed_parser.feed(metadata)
    return feed_parser.close()


def check_dist_requires_python(dist):
    pkg_info_dict = get_metadata(dist)
    requires_python = pkg_info_dict.get('Requires-Python')
    try:
        if not check_requires_python(requires_python):
            raise exceptions.UnsupportedPythonVersion(
                "%s requires Python '%s' but the running Python is %s" % (
                    dist.project_name,
                    requires_python,
                    '.'.join(map(str, sys.version_info[:3])),)
            )
    except specifiers.InvalidSpecifier as e:
        logger.warning(
            "Package %s has an invalid Requires-Python entry %s - %s",
            dist.project_name, requires_python, e,
        )
        return


def get_installer(dist):
    # type: (Distribution) -> str
    if dist.has_metadata('INSTALLER'):
        for line in dist.get_metadata_lines('INSTALLER'):
            if line.strip():
                return line.strip()
    return ''
@ -1,8 +0,0 @@
# Shim to wrap setup.py invocation with setuptools
SETUPTOOLS_SHIM = (
    "import setuptools, tokenize;__file__=%r;"
    "f=getattr(tokenize, 'open', open)(__file__);"
    "code=f.read().replace('\\r\\n', '\\n');"
    "f.close();"
    "exec(compile(code, __file__, 'exec'))"
)
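# Editor's note: a sketch of how the shim is consumed, not part of the
# original file; the setup.py path is hypothetical. pip interpolates the
# script path into %r and hands the one-liner to the interpreter, so
# setuptools gets imported even for scripts that only use distutils.
import sys
args = [sys.executable, "-c", SETUPTOOLS_SHIM % "/path/to/setup.py"]
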
@@ -1,155 +0,0 @@
from __future__ import absolute_import

import errno
import itertools
import logging
import os.path
import tempfile

from pip._internal.utils.misc import rmtree

logger = logging.getLogger(__name__)


class TempDirectory(object):
    """Helper class that owns and cleans up a temporary directory.

    This class can be used as a context manager or as an OO representation of a
    temporary directory.

    Attributes:
        path
            Location to the created temporary directory or None
        delete
            Whether the directory should be deleted when exiting
            (when used as a contextmanager)

    Methods:
        create()
            Creates a temporary directory and stores its path in the path
            attribute.
        cleanup()
            Deletes the temporary directory and sets path attribute to None

    When used as a context manager, a temporary directory is created on
    entering the context and, if the delete attribute is True, on exiting the
    context the created directory is deleted.
    """

    def __init__(self, path=None, delete=None, kind="temp"):
        super(TempDirectory, self).__init__()

        if path is None and delete is None:
            # If we were not given an explicit directory, and we were not given
            # an explicit delete option, then we'll default to deleting.
            delete = True

        self.path = path
        self.delete = delete
        self.kind = kind

    def __repr__(self):
        return "<{} {!r}>".format(self.__class__.__name__, self.path)

    def __enter__(self):
        self.create()
        return self

    def __exit__(self, exc, value, tb):
        if self.delete:
            self.cleanup()

    def create(self):
        """Create a temporary directory and store its path in self.path
        """
        if self.path is not None:
            logger.debug(
                "Skipped creation of temporary directory: {}".format(self.path)
            )
            return
        # We realpath here because some systems have their default tmpdir
        # symlinked to another directory. This tends to confuse build
        # scripts, so we canonicalize the path by traversing potential
        # symlinks here.
        self.path = os.path.realpath(
            tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
        )
        logger.debug("Created temporary directory: {}".format(self.path))

    def cleanup(self):
        """Remove the temporary directory created and reset state
        """
        if self.path is not None and os.path.exists(self.path):
            rmtree(self.path)
        self.path = None
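A short usage sketch of the class above, assuming pip's internal import path:

# Minimal sketch: TempDirectory as a context manager. The directory is
# created on __enter__ and, because delete defaults to True here, removed
# again on __exit__.
from pip._internal.utils.temp_dir import TempDirectory

with TempDirectory(kind="unpack") as temp_dir:
    print(temp_dir.path)  # e.g. /tmp/pip-unpack-abc123 (name varies)
# After the block, cleanup() has run and temp_dir.path is None.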
class AdjacentTempDirectory(TempDirectory):
    """Helper class that creates a temporary directory adjacent to a real one.

    Attributes:
        original
            The original directory to create a temp directory for.
        path
            After calling create() or entering, contains the full
            path to the temporary directory.
        delete
            Whether the directory should be deleted when exiting
            (when used as a contextmanager)

    """
    # The characters that may be used to name the temp directory
    # We always prepend a ~ and then rotate through these until
    # a usable name is found.
    # pkg_resources raises a different error for .dist-info folder
    # with leading '-' and invalid metadata
    LEADING_CHARS = "-~.=%0123456789"

    def __init__(self, original, delete=None):
        super(AdjacentTempDirectory, self).__init__(delete=delete)
        self.original = original.rstrip('/\\')

    @classmethod
    def _generate_names(cls, name):
        """Generates a series of temporary names.

        The algorithm replaces the leading characters in the name
        with ones that are valid filesystem characters, but are not
        valid package names (for both Python and pip definitions of
        package).
        """
        for i in range(1, len(name)):
            for candidate in itertools.combinations_with_replacement(
                    cls.LEADING_CHARS, i - 1):
                new_name = '~' + ''.join(candidate) + name[i:]
                if new_name != name:
                    yield new_name

        # If we make it this far, we will have to make a longer name
        for i in range(len(cls.LEADING_CHARS)):
            for candidate in itertools.combinations_with_replacement(
                    cls.LEADING_CHARS, i):
                new_name = '~' + ''.join(candidate) + name
                if new_name != name:
                    yield new_name

    def create(self):
        root, name = os.path.split(self.original)
        for candidate in self._generate_names(name):
            path = os.path.join(root, candidate)
            try:
                os.mkdir(path)
            except OSError as ex:
                # Continue if the name exists already
                if ex.errno != errno.EEXIST:
                    raise
            else:
                self.path = os.path.realpath(path)
                break

        if not self.path:
            # Final fallback on the default behavior.
            self.path = os.path.realpath(
                tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
            )
        logger.debug("Created temporary directory: {}".format(self.path))
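An illustration of the candidates _generate_names() yields for the name 'foo'; the expected output was traced by hand from the code above, so treat it as a sketch:

# Candidates keep the original length first, then grow longer; order follows
# LEADING_CHARS.
import itertools

from pip._internal.utils.temp_dir import AdjacentTempDirectory

names = AdjacentTempDirectory._generate_names('foo')
print(list(itertools.islice(names, 5)))
# expected: ['~oo', '~-o', '~~o', '~.o', '~=o']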
@@ -1,29 +0,0 @@
"""For neatly implementing static typing in pip.

`mypy` - the static type analysis tool we use - uses the `typing` module, which
provides core functionality fundamental to mypy's functioning.

Generally, `typing` would be imported at runtime and used in that fashion -
it acts as a no-op at runtime and does not have any run-time overhead by
design.

As it turns out, `typing` is not vendorable - it uses separate sources for
Python 2/Python 3. Thus, this codebase can not expect it to be present.
To work around this, mypy allows the typing import to be behind a False-y
optional to prevent it from running at runtime and type-comments can be used
to remove the need for the types to be accessible directly during runtime.

This module provides the False-y guard in a nicely named fashion so that a
curious maintainer can reach here to read this.

In pip, all static-typing related imports should be guarded as follows:

    from pip._internal.utils.typing import MYPY_CHECK_RUNNING

    if MYPY_CHECK_RUNNING:
        from typing import ...  # noqa: F401

Ref: https://github.com/python/mypy/issues/3216
"""

MYPY_CHECK_RUNNING = False
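A minimal sketch of the guarded-import pattern the docstring prescribes; the function is hypothetical, only the pattern matters:

# `typing` is imported only when mypy analyzes the file, never at runtime,
# and the annotation lives in a type comment.
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import List  # noqa: F401


def count_chars(texts):
    # type: (List[str]) -> int
    # No runtime dependency on the typing module.
    return sum(len(t) for t in texts)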
@@ -1,441 +0,0 @@
from __future__ import absolute_import, division

import contextlib
import itertools
import logging
import sys
import time
from signal import SIGINT, default_int_handler, signal

from pip._vendor import six
from pip._vendor.progress.bar import (
    Bar, ChargingBar, FillingCirclesBar, FillingSquaresBar, IncrementalBar,
    ShadyBar,
)
from pip._vendor.progress.helpers import HIDE_CURSOR, SHOW_CURSOR, WritelnMixin
from pip._vendor.progress.spinner import Spinner

from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.logging import get_indentation
from pip._internal.utils.misc import format_size
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import Any, Iterator, IO  # noqa: F401

try:
    from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
    colorama = None

logger = logging.getLogger(__name__)


def _select_progress_class(preferred, fallback):
    encoding = getattr(preferred.file, "encoding", None)

    # If we don't know what encoding this file is in, then we'll just assume
    # that it doesn't support unicode and use the ASCII bar.
    if not encoding:
        return fallback

    # Collect all of the possible characters we want to use with the preferred
    # bar.
    characters = [
        getattr(preferred, "empty_fill", six.text_type()),
        getattr(preferred, "fill", six.text_type()),
    ]
    characters += list(getattr(preferred, "phases", []))

    # Try to decode the characters we're using for the bar using the encoding
    # of the given file, if this works then we'll assume that we can use the
    # fancier bar and if not we'll fall back to the plaintext bar.
    try:
        six.text_type().join(characters).encode(encoding)
    except UnicodeEncodeError:
        return fallback
    else:
        return preferred


_BaseBar = _select_progress_class(IncrementalBar, Bar)  # type: Any


class InterruptibleMixin(object):
    """
    Helper to ensure that self.finish() gets called on keyboard interrupt.

    This allows downloads to be interrupted without leaving temporary state
    (like hidden cursors) behind.

    This class is similar to the progress library's existing SigIntMixin
    helper, but as of version 1.2, that helper has the following problems:

    1. It calls sys.exit().
    2. It discards the existing SIGINT handler completely.
    3. It leaves its own handler in place even after an uninterrupted finish,
       which will have unexpected delayed effects if the user triggers an
       unrelated keyboard interrupt some time after a progress-displaying
       download has already completed, for example.
    """

    def __init__(self, *args, **kwargs):
        """
        Save the original SIGINT handler for later.
        """
        super(InterruptibleMixin, self).__init__(*args, **kwargs)

        self.original_handler = signal(SIGINT, self.handle_sigint)

        # If signal() returns None, the previous handler was not installed from
        # Python, and we cannot restore it. This probably should not happen,
        # but if it does, we must restore something sensible instead, at least.
        # The least bad option should be Python's default SIGINT handler, which
        # just raises KeyboardInterrupt.
        if self.original_handler is None:
            self.original_handler = default_int_handler

    def finish(self):
        """
        Restore the original SIGINT handler after finishing.

        This should happen regardless of whether the progress display finishes
        normally, or gets interrupted.
        """
        super(InterruptibleMixin, self).finish()
        signal(SIGINT, self.original_handler)

    def handle_sigint(self, signum, frame):
        """
        Call self.finish() before delegating to the original SIGINT handler.

        This handler should only be in place while the progress display is
        active.
        """
        self.finish()
        self.original_handler(signum, frame)


class SilentBar(Bar):

    def update(self):
        pass


class BlueEmojiBar(IncrementalBar):

    suffix = "%(percent)d%%"
    bar_prefix = " "
    bar_suffix = " "
    phases = (u"\U0001F539", u"\U0001F537", u"\U0001F535")  # type: Any


class DownloadProgressMixin(object):

    def __init__(self, *args, **kwargs):
        super(DownloadProgressMixin, self).__init__(*args, **kwargs)
        self.message = (" " * (get_indentation() + 2)) + self.message

    @property
    def downloaded(self):
        return format_size(self.index)

    @property
    def download_speed(self):
        # Avoid zero division errors...
        if self.avg == 0.0:
            return "..."
        return format_size(1 / self.avg) + "/s"

    @property
    def pretty_eta(self):
        if self.eta:
            return "eta %s" % self.eta_td
        return ""

    def iter(self, it, n=1):
        for x in it:
            yield x
            self.next(n)
        self.finish()


class WindowsMixin(object):

    def __init__(self, *args, **kwargs):
        # The Windows terminal does not support the hide/show cursor ANSI codes
        # even with colorama. So we'll ensure that hide_cursor is False on
        # Windows.
        # This call needs to go before the super() call, so that hide_cursor
        # is set in time. The base progress bar class writes the "hide cursor"
        # code to the terminal in its init, so if we don't set this soon
        # enough, we get a "hide" with no corresponding "show"...
        if WINDOWS and self.hide_cursor:
            self.hide_cursor = False

        super(WindowsMixin, self).__init__(*args, **kwargs)

        # Check if we are running on Windows and we have the colorama module,
        # if we do then wrap our file with it.
        if WINDOWS and colorama:
            self.file = colorama.AnsiToWin32(self.file)
            # The progress code expects to be able to call self.file.isatty()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.isatty = lambda: self.file.wrapped.isatty()
            # The progress code expects to be able to call self.file.flush()
            # but the colorama.AnsiToWin32() object doesn't have that, so we'll
            # add it.
            self.file.flush = lambda: self.file.wrapped.flush()


class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin):

    file = sys.stdout
    message = "%(percent)d%%"
    suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"

# NOTE: The "type: ignore" comments on the following classes are there to
#       work around https://github.com/python/typing/issues/241


class DefaultDownloadProgressBar(BaseDownloadProgressBar,
                                 _BaseBar):
    pass


class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):  # type: ignore
    pass


class DownloadIncrementalBar(BaseDownloadProgressBar,  # type: ignore
                             IncrementalBar):
    pass


class DownloadChargingBar(BaseDownloadProgressBar,  # type: ignore
                          ChargingBar):
    pass


class DownloadShadyBar(BaseDownloadProgressBar, ShadyBar):  # type: ignore
    pass


class DownloadFillingSquaresBar(BaseDownloadProgressBar,  # type: ignore
                                FillingSquaresBar):
    pass


class DownloadFillingCirclesBar(BaseDownloadProgressBar,  # type: ignore
                                FillingCirclesBar):
    pass


class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar,  # type: ignore
                                   BlueEmojiBar):
    pass


class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin,
                              DownloadProgressMixin, WritelnMixin, Spinner):

    file = sys.stdout
    suffix = "%(downloaded)s %(download_speed)s"

    def next_phase(self):
        if not hasattr(self, "_phaser"):
            self._phaser = itertools.cycle(self.phases)
        return next(self._phaser)

    def update(self):
        message = self.message % self
        phase = self.next_phase()
        suffix = self.suffix % self
        line = ''.join([
            message,
            " " if message else "",
            phase,
            " " if suffix else "",
            suffix,
        ])

        self.writeln(line)


BAR_TYPES = {
    "off": (DownloadSilentBar, DownloadSilentBar),
    "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
    "ascii": (DownloadIncrementalBar, DownloadProgressSpinner),
    "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
    "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner)
}


def DownloadProgressProvider(progress_bar, max=None):
    if max is None or max == 0:
        return BAR_TYPES[progress_bar][1]().iter
    else:
        return BAR_TYPES[progress_bar][0](max=max).iter
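A hedged sketch of how the provider is consumed; the byte chunks are stand-ins for a real download stream:

# When max is known the provider returns a bound bar.iter; each yielded
# chunk advances the bar by n bytes.
chunks = [b'x' * 512] * 4
progress = DownloadProgressProvider('on', max=len(chunks) * 512)
for chunk in progress(iter(chunks), 512):
    pass  # consume the stream; the bar finishes itself at the end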
################################################################
# Generic "something is happening" spinners
#
# We don't even try using progress.spinner.Spinner here because it's actually
# simpler to reimplement from scratch than to coerce their code into doing
# what we need.
################################################################

@contextlib.contextmanager
def hidden_cursor(file):
    # type: (IO) -> Iterator[None]
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    # We don't want to clutter the output with control characters if we're
    # writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            file.write(SHOW_CURSOR)


class RateLimiter(object):
    def __init__(self, min_update_interval_seconds):
        # type: (float) -> None
        self._min_update_interval_seconds = min_update_interval_seconds
        self._last_update = 0  # type: float

    def ready(self):
        # type: () -> bool
        now = time.time()
        delta = now - self._last_update
        return delta >= self._min_update_interval_seconds

    def reset(self):
        # type: () -> None
        self._last_update = time.time()
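A minimal usage sketch of RateLimiter, throttling status output to one line per half second; the loop body is a placeholder for real work:

limiter = RateLimiter(0.5)
for _ in range(10000):  # stand-in for real work
    if limiter.ready():
        print('still working...')
        limiter.reset()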
class SpinnerInterface(object):
    def spin(self):
        # type: () -> None
        raise NotImplementedError()

    def finish(self, final_status):
        # type: (str) -> None
        raise NotImplementedError()


class InteractiveSpinner(SpinnerInterface):
    def __init__(self, message, file=None, spin_chars="-\\|/",
                 # Empirically, 8 updates/second looks nice
                 min_update_interval_seconds=0.125):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        self._width = 0

    def _write(self, status):
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self):
        # type: () -> None
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status):
        # type: (str) -> None
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True


# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(SpinnerInterface):
    def __init__(self, message, min_update_interval_seconds=60):
        # type: (str, float) -> None
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status):
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self):
        # type: () -> None
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status):
        # type: (str) -> None
        if self._finished:
            return
        self._update("finished with status '%s'" % (final_status,))
        self._finished = True


@contextlib.contextmanager
def open_spinner(message):
    # type: (str) -> Iterator[SpinnerInterface]
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)  # type: SpinnerInterface
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
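A hedged usage sketch of open_spinner; the sleep stands in for real work:

import time

with open_spinner('Running setup.py bdist_wheel') as spinner:
    for _ in range(3):
        time.sleep(0.3)   # stand-in for real work
        spinner.spin()    # rate-limited internally
# finish("done") runs on clean exit, "canceled" on Ctrl-C, "error" otherwise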
@@ -1,534 +0,0 @@
"""Handles all VCS (version control) support"""
from __future__ import absolute_import

import errno
import logging
import os
import shutil
import sys

from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.exceptions import BadCommand
from pip._internal.utils.misc import (
    display_path, backup_dir, call_subprocess, rmtree, ask_path_exists,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (  # noqa: F401
        Any, Dict, Iterable, List, Mapping, Optional, Text, Tuple, Type
    )
    from pip._internal.utils.ui import SpinnerInterface  # noqa: F401

    AuthInfo = Tuple[Optional[str], Optional[str]]

__all__ = ['vcs']


logger = logging.getLogger(__name__)


class RemoteNotFoundError(Exception):
    pass


class RevOptions(object):

    """
    Encapsulates a VCS-specific revision to install, along with any VCS
    install options.

    Instances of this class should be treated as if immutable.
    """

    def __init__(self, vcs, rev=None, extra_args=None):
        # type: (VersionControl, Optional[str], Optional[List[str]]) -> None
        """
        Args:
          vcs: a VersionControl object.
          rev: the name of the revision to install.
          extra_args: a list of extra options.
        """
        if extra_args is None:
            extra_args = []

        self.extra_args = extra_args
        self.rev = rev
        self.vcs = vcs

    def __repr__(self):
        return '<RevOptions {}: rev={!r}>'.format(self.vcs.name, self.rev)

    @property
    def arg_rev(self):
        # type: () -> Optional[str]
        if self.rev is None:
            return self.vcs.default_arg_rev

        return self.rev

    def to_args(self):
        # type: () -> List[str]
        """
        Return the VCS-specific command arguments.
        """
        args = []  # type: List[str]
        rev = self.arg_rev
        if rev is not None:
            args += self.vcs.get_base_rev_args(rev)
        args += self.extra_args

        return args

    def to_display(self):
        # type: () -> str
        if not self.rev:
            return ''

        return ' (to revision {})'.format(self.rev)

    def make_new(self, rev):
        # type: (str) -> RevOptions
        """
        Make a copy of the current instance, but with a new rev.

        Args:
          rev: the name of the revision for the new object.
        """
        return self.vcs.make_rev_options(rev, extra_args=self.extra_args)
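A sketch of RevOptions behavior, assuming the Git backend defined further down in this diff (Git's get_base_rev_args returns [rev]):

from pip._internal.vcs.git import Git

opts = RevOptions(Git(), rev='v1.0', extra_args=['--depth', '1'])
print(opts.to_args())     # ['v1.0', '--depth', '1']
print(opts.to_display())  # ' (to revision v1.0)'
print(opts.make_new('abc123').rev)  # 'abc123'; extra_args are carried over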
class VcsSupport(object):
    _registry = {}  # type: Dict[str, Type[VersionControl]]
    schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']

    def __init__(self):
        # type: () -> None
        # Register more schemes with urlparse for various version control
        # systems
        urllib_parse.uses_netloc.extend(self.schemes)
        # Python >= 2.7.4, 3.3 doesn't have uses_fragment
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(self.schemes)
        super(VcsSupport, self).__init__()

    def __iter__(self):
        return self._registry.__iter__()

    @property
    def backends(self):
        # type: () -> List[Type[VersionControl]]
        return list(self._registry.values())

    @property
    def dirnames(self):
        # type: () -> List[str]
        return [backend.dirname for backend in self.backends]

    @property
    def all_schemes(self):
        # type: () -> List[str]
        schemes = []  # type: List[str]
        for backend in self.backends:
            schemes.extend(backend.schemes)
        return schemes

    def register(self, cls):
        # type: (Type[VersionControl]) -> None
        if not hasattr(cls, 'name'):
            logger.warning('Cannot register VCS %s', cls.__name__)
            return
        if cls.name not in self._registry:
            self._registry[cls.name] = cls
            logger.debug('Registered VCS backend: %s', cls.name)

    def unregister(self, cls=None, name=None):
        # type: (Optional[Type[VersionControl]], Optional[str]) -> None
        if name in self._registry:
            del self._registry[name]
        elif cls in self._registry.values():
            del self._registry[cls.name]
        else:
            logger.warning('Cannot unregister because no class or name given')

    def get_backend_type(self, location):
        # type: (str) -> Optional[Type[VersionControl]]
        """
        Return the type of the version control backend if found at given
        location, e.g. vcs.get_backend_type('/path/to/vcs/checkout')
        """
        for vc_type in self._registry.values():
            if vc_type.controls_location(location):
                logger.debug('Determined that %s uses VCS: %s',
                             location, vc_type.name)
                return vc_type
        return None

    def get_backend(self, name):
        # type: (str) -> Optional[Type[VersionControl]]
        name = name.lower()
        if name in self._registry:
            return self._registry[name]
        return None


vcs = VcsSupport()


class VersionControl(object):
    name = ''
    dirname = ''
    repo_name = ''
    # List of supported schemes for this Version Control
    schemes = ()  # type: Tuple[str, ...]
    # Iterable of environment variable names to pass to call_subprocess().
    unset_environ = ()  # type: Tuple[str, ...]
    default_arg_rev = None  # type: Optional[str]

    def __init__(self, url=None, *args, **kwargs):
        self.url = url
        super(VersionControl, self).__init__(*args, **kwargs)

    def get_base_rev_args(self, rev):
        """
        Return the base revision arguments for a vcs command.

        Args:
          rev: the name of a revision to install. Cannot be None.
        """
        raise NotImplementedError

    def make_rev_options(self, rev=None, extra_args=None):
        # type: (Optional[str], Optional[List[str]]) -> RevOptions
        """
        Return a RevOptions object.

        Args:
          rev: the name of a revision to install.
          extra_args: a list of extra options.
        """
        return RevOptions(self, rev, extra_args=extra_args)

    @classmethod
    def _is_local_repository(cls, repo):
        # type: (str) -> bool
        """
        posix absolute paths start with os.path.sep,
        win32 ones start with drive (like c:\\folder)
        """
        drive, tail = os.path.splitdrive(repo)
        return repo.startswith(os.path.sep) or bool(drive)

    def export(self, location):
        """
        Export the repository at the url to the destination location
        i.e. only download the files, without vcs information
        """
        raise NotImplementedError

    def get_netloc_and_auth(self, netloc, scheme):
        """
        Parse the repository URL's netloc, and return the new netloc to use
        along with auth information.

        Args:
          netloc: the original repository URL netloc.
          scheme: the repository URL's scheme without the vcs prefix.

        This is mainly for the Subversion class to override, so that auth
        information can be provided via the --username and --password options
        instead of through the URL. For other subclasses like Git without
        such an option, auth information must stay in the URL.

        Returns: (netloc, (username, password)).
        """
        return netloc, (None, None)

    def get_url_rev_and_auth(self, url):
        # type: (str) -> Tuple[str, Optional[str], AuthInfo]
        """
        Parse the repository URL to use, and return the URL, revision,
        and auth info to use.

        Returns: (url, rev, (username, password)).
        """
        scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
        if '+' not in scheme:
            raise ValueError(
                "Sorry, {!r} is a malformed VCS url. "
                "The format is <vcs>+<protocol>://<url>, "
                "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url)
            )
        # Remove the vcs prefix.
        scheme = scheme.split('+', 1)[1]
        netloc, user_pass = self.get_netloc_and_auth(netloc, scheme)
        rev = None
        if '@' in path:
            path, rev = path.rsplit('@', 1)
        url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
        return url, rev, user_pass
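A worked example of the parsing above, using the documented <vcs>+<protocol>://<url> format; the values were traced by hand, so treat them as a sketch:

from pip._internal.vcs.git import Git

url, rev, user_pass = Git().get_url_rev_and_auth(
    'git+https://github.com/pypa/pip.git@19.0#egg=pip'
)
# url       -> 'https://github.com/pypa/pip.git' (vcs prefix and fragment dropped)
# rev       -> '19.0'
# user_pass -> (None, None)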
    def make_rev_args(self, username, password):
        """
        Return the RevOptions "extra arguments" to use in obtain().
        """
        return []

    def get_url_rev_options(self, url):
        # type: (str) -> Tuple[str, RevOptions]
        """
        Return the URL and RevOptions object to use in obtain() and in
        some cases export(), as a tuple (url, rev_options).
        """
        url, rev, user_pass = self.get_url_rev_and_auth(url)
        username, password = user_pass
        extra_args = self.make_rev_args(username, password)
        rev_options = self.make_rev_options(rev, extra_args=extra_args)

        return url, rev_options

    def normalize_url(self, url):
        # type: (str) -> str
        """
        Normalize a URL for comparison by unquoting it and removing any
        trailing slash.
        """
        return urllib_parse.unquote(url).rstrip('/')

    def compare_urls(self, url1, url2):
        # type: (str, str) -> bool
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return (self.normalize_url(url1) == self.normalize_url(url2))

    def fetch_new(self, dest, url, rev_options):
        """
        Fetch a revision from a repository, in the case that this is the
        first fetch from the repository.

        Args:
          dest: the directory to fetch the repository to.
          rev_options: a RevOptions object.
        """
        raise NotImplementedError

    def switch(self, dest, url, rev_options):
        """
        Switch the repo at ``dest`` to point to ``URL``.

        Args:
          rev_options: a RevOptions object.
        """
        raise NotImplementedError

    def update(self, dest, url, rev_options):
        """
        Update an already-existing repo to the given ``rev_options``.

        Args:
          rev_options: a RevOptions object.
        """
        raise NotImplementedError

    def is_commit_id_equal(self, dest, name):
        """
        Return whether the id of the current commit equals the given name.

        Args:
          dest: the repository directory.
          name: a string name.
        """
        raise NotImplementedError

    def obtain(self, dest):
        # type: (str) -> None
        """
        Install or update in editable mode the package represented by this
        VersionControl object.

        Args:
          dest: the repository directory in which to install or update.
        """
        url, rev_options = self.get_url_rev_options(self.url)

        if not os.path.exists(dest):
            self.fetch_new(dest, url, rev_options)
            return

        rev_display = rev_options.to_display()
        if self.is_repository_directory(dest):
            existing_url = self.get_remote_url(dest)
            if self.compare_urls(existing_url, url):
                logger.debug(
                    '%s in %s exists, and has correct URL (%s)',
                    self.repo_name.title(),
                    display_path(dest),
                    url,
                )
                if not self.is_commit_id_equal(dest, rev_options.rev):
                    logger.info(
                        'Updating %s %s%s',
                        display_path(dest),
                        self.repo_name,
                        rev_display,
                    )
                    self.update(dest, url, rev_options)
                else:
                    logger.info('Skipping because already up-to-date.')
                return

            logger.warning(
                '%s %s in %s exists with URL %s',
                self.name,
                self.repo_name,
                display_path(dest),
                existing_url,
            )
            prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
                      ('s', 'i', 'w', 'b'))
        else:
            logger.warning(
                'Directory %s already exists, and is not a %s %s.',
                dest,
                self.name,
                self.repo_name,
            )
            # https://github.com/python/mypy/issues/1174
            prompt = ('(i)gnore, (w)ipe, (b)ackup ',  # type: ignore
                      ('i', 'w', 'b'))

        logger.warning(
            'The plan is to install the %s repository %s',
            self.name,
            url,
        )
        response = ask_path_exists('What to do?  %s' % prompt[0], prompt[1])

        if response == 'a':
            sys.exit(-1)

        if response == 'w':
            logger.warning('Deleting %s', display_path(dest))
            rmtree(dest)
            self.fetch_new(dest, url, rev_options)
            return

        if response == 'b':
            dest_dir = backup_dir(dest)
            logger.warning(
                'Backing up %s to %s', display_path(dest), dest_dir,
            )
            shutil.move(dest, dest_dir)
            self.fetch_new(dest, url, rev_options)
            return

        # Do nothing if the response is "i".
        if response == 's':
            logger.info(
                'Switching %s %s to %s%s',
                self.repo_name,
                display_path(dest),
                url,
                rev_display,
            )
            self.switch(dest, url, rev_options)

    def unpack(self, location):
        # type: (str) -> None
        """
        Clean up current location and download the url repository
        (and vcs info) into location
        """
        if os.path.exists(location):
            rmtree(location)
        self.obtain(location)

    @classmethod
    def get_src_requirement(cls, location, project_name):
        """
        Return a string representing the requirement needed to
        redownload the files currently present in location, something
        like:
          {repository_url}@{revision}#egg={project_name}-{version_identifier}
        """
        raise NotImplementedError

    @classmethod
    def get_remote_url(cls, location):
        """
        Return the url used at location

        Raises RemoteNotFoundError if the repository does not have a remote
        url configured.
        """
        raise NotImplementedError

    @classmethod
    def get_revision(cls, location):
        """
        Return the current commit id of the files at the given location.
        """
        raise NotImplementedError

    @classmethod
    def run_command(
        cls,
        cmd,  # type: List[str]
        show_stdout=True,  # type: bool
        cwd=None,  # type: Optional[str]
        on_returncode='raise',  # type: str
        extra_ok_returncodes=None,  # type: Optional[Iterable[int]]
        command_desc=None,  # type: Optional[str]
        extra_environ=None,  # type: Optional[Mapping[str, Any]]
        spinner=None  # type: Optional[SpinnerInterface]
    ):
        # type: (...) -> Optional[Text]
        """
        Run a VCS subcommand.
        This is simply a wrapper around call_subprocess that adds the VCS
        command name, and checks that the VCS is available.
        """
        cmd = [cls.name] + cmd
        try:
            return call_subprocess(cmd, show_stdout, cwd,
                                   on_returncode=on_returncode,
                                   extra_ok_returncodes=extra_ok_returncodes,
                                   command_desc=command_desc,
                                   extra_environ=extra_environ,
                                   unset_environ=cls.unset_environ,
                                   spinner=spinner)
        except OSError as e:
            # errno.ENOENT = no such file or directory
            # In other words, the VCS executable isn't available
            if e.errno == errno.ENOENT:
                raise BadCommand(
                    'Cannot find command %r - do you have '
                    '%r installed and in your '
                    'PATH?' % (cls.name, cls.name))
            else:
                raise  # re-raise exception if a different error occurred

    @classmethod
    def is_repository_directory(cls, path):
        # type: (str) -> bool
        """
        Return whether a directory path is a repository directory.
        """
        logger.debug('Checking in %s for %s (%s)...',
                     path, cls.dirname, cls.name)
        return os.path.exists(os.path.join(path, cls.dirname))

    @classmethod
    def controls_location(cls, location):
        # type: (str) -> bool
        """
        Check if a location is controlled by the vcs.
        It is meant to be overridden to implement smarter detection
        mechanisms for specific vcs.

        This can do more than is_repository_directory() alone. For example,
        the Git override checks that Git is actually available.
        """
        return cls.is_repository_directory(location)
@@ -1,114 +0,0 @@
from __future__ import absolute_import

import logging
import os

from pip._vendor.six.moves.urllib import parse as urllib_parse

from pip._internal.download import path_to_url
from pip._internal.utils.misc import (
    display_path, make_vcs_requirement_url, rmtree,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.vcs import VersionControl, vcs

logger = logging.getLogger(__name__)


class Bazaar(VersionControl):
    name = 'bzr'
    dirname = '.bzr'
    repo_name = 'branch'
    schemes = (
        'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp',
        'bzr+lp',
    )

    def __init__(self, url=None, *args, **kwargs):
        super(Bazaar, self).__init__(url, *args, **kwargs)
        # This is only needed for python <2.7.5
        # Register lp but do not expose as a scheme to support bzr+lp.
        if getattr(urllib_parse, 'uses_fragment', None):
            urllib_parse.uses_fragment.extend(['lp'])

    def get_base_rev_args(self, rev):
        return ['-r', rev]

    def export(self, location):
        """
        Export the Bazaar repository at the url to the destination location
        """
        # Remove the location to make sure Bazaar can export it correctly
        if os.path.exists(location):
            rmtree(location)

        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)

            self.run_command(
                ['export', location],
                cwd=temp_dir.path, show_stdout=False,
            )

    def fetch_new(self, dest, url, rev_options):
        rev_display = rev_options.to_display()
        logger.info(
            'Checking out %s%s to %s',
            url,
            rev_display,
            display_path(dest),
        )
        cmd_args = ['branch', '-q'] + rev_options.to_args() + [url, dest]
        self.run_command(cmd_args)

    def switch(self, dest, url, rev_options):
        self.run_command(['switch', url], cwd=dest)

    def update(self, dest, url, rev_options):
        cmd_args = ['pull', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)

    def get_url_rev_and_auth(self, url):
        # hotfix: the URL scheme lost its bzr+ prefix when bzr+ssh:// was
        # parsed, so re-add it here
        url, rev, user_pass = super(Bazaar, self).get_url_rev_and_auth(url)
        if url.startswith('ssh://'):
            url = 'bzr+' + url
        return url, rev, user_pass

    @classmethod
    def get_remote_url(cls, location):
        urls = cls.run_command(['info'], show_stdout=False, cwd=location)
        for line in urls.splitlines():
            line = line.strip()
            for x in ('checkout of branch: ',
                      'parent branch: '):
                if line.startswith(x):
                    repo = line.split(x)[1]
                    if cls._is_local_repository(repo):
                        return path_to_url(repo)
                    return repo
        return None

    @classmethod
    def get_revision(cls, location):
        revision = cls.run_command(
            ['revno'], show_stdout=False, cwd=location,
        )
        return revision.splitlines()[-1]

    @classmethod
    def get_src_requirement(cls, location, project_name):
        repo = cls.get_remote_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('bzr:'):
            repo = 'bzr+' + repo
        current_rev = cls.get_revision(location)
        return make_vcs_requirement_url(repo, current_rev, project_name)

    def is_commit_id_equal(self, dest, name):
        """Always assume the versions don't match"""
        return False


vcs.register(Bazaar)
@ -1,369 +0,0 @@
|
|||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import os.path
|
|
||||||
import re
|
|
||||||
|
|
||||||
from pip._vendor.packaging.version import parse as parse_version
|
|
||||||
from pip._vendor.six.moves.urllib import parse as urllib_parse
|
|
||||||
from pip._vendor.six.moves.urllib import request as urllib_request
|
|
||||||
|
|
||||||
from pip._internal.exceptions import BadCommand
|
|
||||||
from pip._internal.utils.compat import samefile
|
|
||||||
from pip._internal.utils.misc import (
|
|
||||||
display_path, make_vcs_requirement_url, redact_password_from_url,
|
|
||||||
)
|
|
||||||
from pip._internal.utils.temp_dir import TempDirectory
|
|
||||||
from pip._internal.vcs import RemoteNotFoundError, VersionControl, vcs
|
|
||||||
|
|
||||||
urlsplit = urllib_parse.urlsplit
|
|
||||||
urlunsplit = urllib_parse.urlunsplit
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
HASH_REGEX = re.compile('[a-fA-F0-9]{40}')
|
|
||||||
|
|
||||||
|
|
||||||
def looks_like_hash(sha):
|
|
||||||
return bool(HASH_REGEX.match(sha))
|
|
||||||
|
|
||||||
|
|
||||||
class Git(VersionControl):
|
|
||||||
name = 'git'
|
|
||||||
dirname = '.git'
|
|
||||||
repo_name = 'clone'
|
|
||||||
schemes = (
|
|
||||||
'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
|
|
||||||
)
|
|
||||||
# Prevent the user's environment variables from interfering with pip:
|
|
||||||
# https://github.com/pypa/pip/issues/1130
|
|
||||||
unset_environ = ('GIT_DIR', 'GIT_WORK_TREE')
|
|
||||||
default_arg_rev = 'HEAD'
|
|
||||||
|
|
||||||
def __init__(self, url=None, *args, **kwargs):
|
|
||||||
|
|
||||||
# Works around an apparent Git bug
|
|
||||||
# (see https://article.gmane.org/gmane.comp.version-control.git/146500)
|
|
||||||
if url:
|
|
||||||
scheme, netloc, path, query, fragment = urlsplit(url)
|
|
||||||
if scheme.endswith('file'):
|
|
||||||
initial_slashes = path[:-len(path.lstrip('/'))]
|
|
||||||
newpath = (
|
|
||||||
initial_slashes +
|
|
||||||
urllib_request.url2pathname(path)
|
|
||||||
.replace('\\', '/').lstrip('/')
|
|
||||||
)
|
|
||||||
url = urlunsplit((scheme, netloc, newpath, query, fragment))
|
|
||||||
after_plus = scheme.find('+') + 1
|
|
||||||
url = scheme[:after_plus] + urlunsplit(
|
|
||||||
(scheme[after_plus:], netloc, newpath, query, fragment),
|
|
||||||
)
|
|
||||||
|
|
||||||
super(Git, self).__init__(url, *args, **kwargs)
|
|
||||||
|
|
||||||
def get_base_rev_args(self, rev):
|
|
||||||
return [rev]
|
|
||||||
|
|
||||||
def get_git_version(self):
|
|
||||||
VERSION_PFX = 'git version '
|
|
||||||
version = self.run_command(['version'], show_stdout=False)
|
|
||||||
if version.startswith(VERSION_PFX):
|
|
||||||
version = version[len(VERSION_PFX):].split()[0]
|
|
||||||
else:
|
|
||||||
version = ''
|
|
||||||
# get first 3 positions of the git version becasue
|
|
||||||
# on windows it is x.y.z.windows.t, and this parses as
|
|
||||||
# LegacyVersion which always smaller than a Version.
|
|
||||||
version = '.'.join(version.split('.')[:3])
|
|
||||||
return parse_version(version)
|
|
||||||
|
|
||||||
def get_current_branch(self, location):
|
|
||||||
"""
|
|
||||||
Return the current branch, or None if HEAD isn't at a branch
|
|
||||||
(e.g. detached HEAD).
|
|
||||||
"""
|
|
||||||
# git-symbolic-ref exits with empty stdout if "HEAD" is a detached
|
|
||||||
# HEAD rather than a symbolic ref. In addition, the -q causes the
|
|
||||||
# command to exit with status code 1 instead of 128 in this case
|
|
||||||
# and to suppress the message to stderr.
|
|
||||||
args = ['symbolic-ref', '-q', 'HEAD']
|
|
||||||
output = self.run_command(
|
|
||||||
args, extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
|
|
||||||
)
|
|
||||||
ref = output.strip()
|
|
||||||
|
|
||||||
if ref.startswith('refs/heads/'):
|
|
||||||
return ref[len('refs/heads/'):]
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
def export(self, location):
|
|
||||||
"""Export the Git repository at the url to the destination location"""
|
|
||||||
if not location.endswith('/'):
|
|
||||||
location = location + '/'
|
|
||||||
|
|
||||||
with TempDirectory(kind="export") as temp_dir:
|
|
||||||
self.unpack(temp_dir.path)
|
|
||||||
self.run_command(
|
|
||||||
['checkout-index', '-a', '-f', '--prefix', location],
|
|
||||||
show_stdout=False, cwd=temp_dir.path
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_revision_sha(self, dest, rev):
|
|
||||||
"""
|
|
||||||
Return (sha_or_none, is_branch), where sha_or_none is a commit hash
|
|
||||||
if the revision names a remote branch or tag, otherwise None.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
dest: the repository directory.
|
|
||||||
rev: the revision name.
|
|
||||||
"""
|
|
||||||
# Pass rev to pre-filter the list.
|
|
||||||
output = self.run_command(['show-ref', rev], cwd=dest,
|
|
||||||
show_stdout=False, on_returncode='ignore')
|
|
||||||
refs = {}
|
|
||||||
for line in output.strip().splitlines():
|
|
||||||
try:
|
|
||||||
sha, ref = line.split()
|
|
||||||
except ValueError:
|
|
||||||
# Include the offending line to simplify troubleshooting if
|
|
||||||
# this error ever occurs.
|
|
||||||
raise ValueError('unexpected show-ref line: {!r}'.format(line))
|
|
||||||
|
|
||||||
refs[ref] = sha
|
|
||||||
|
|
||||||
branch_ref = 'refs/remotes/origin/{}'.format(rev)
|
|
||||||
tag_ref = 'refs/tags/{}'.format(rev)
|
|
||||||
|
|
||||||
sha = refs.get(branch_ref)
|
|
||||||
if sha is not None:
|
|
||||||
return (sha, True)
|
|
||||||
|
|
||||||
sha = refs.get(tag_ref)
|
|
||||||
|
|
||||||
return (sha, False)
|
|
||||||
|
|
||||||
def resolve_revision(self, dest, url, rev_options):
|
|
||||||
"""
|
|
||||||
Resolve a revision to a new RevOptions object with the SHA1 of the
|
|
||||||
branch, tag, or ref if found.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
rev_options: a RevOptions object.
|
|
||||||
"""
|
|
||||||
rev = rev_options.arg_rev
|
|
||||||
sha, is_branch = self.get_revision_sha(dest, rev)
|
|
||||||
|
|
||||||
if sha is not None:
|
|
||||||
rev_options = rev_options.make_new(sha)
|
|
||||||
rev_options.branch_name = rev if is_branch else None
|
|
||||||
|
|
||||||
return rev_options
|
|
||||||
|
|
||||||
# Do not show a warning for the common case of something that has
|
|
||||||
# the form of a Git commit hash.
|
|
||||||
if not looks_like_hash(rev):
|
|
||||||
logger.warning(
|
|
||||||
"Did not find branch or tag '%s', assuming revision or ref.",
|
|
||||||
rev,
|
|
||||||
)
|
|
||||||
|
|
||||||
if not rev.startswith('refs/'):
|
|
||||||
return rev_options
|
|
||||||
|
|
||||||
# If it looks like a ref, we have to fetch it explicitly.
|
|
||||||
self.run_command(
|
|
||||||
['fetch', '-q', url] + rev_options.to_args(),
|
|
||||||
cwd=dest,
|
|
||||||
)
|
|
||||||
# Change the revision to the SHA of the ref we fetched
|
|
||||||
sha = self.get_revision(dest, rev='FETCH_HEAD')
|
|
||||||
rev_options = rev_options.make_new(sha)
|
|
||||||
|
|
||||||
return rev_options
|
|
||||||
|
|
||||||
def is_commit_id_equal(self, dest, name):
|
|
||||||
"""
|
|
||||||
Return whether the current commit hash equals the given name.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
dest: the repository directory.
|
|
||||||
name: a string name.
|
|
||||||
"""
|
|
||||||
if not name:
|
|
||||||
# Then avoid an unnecessary subprocess call.
|
|
||||||
return False
|
|
||||||
|
|
||||||
return self.get_revision(dest) == name
|
|
||||||
|
|
||||||
def fetch_new(self, dest, url, rev_options):
|
|
||||||
rev_display = rev_options.to_display()
|
|
||||||
logger.info(
|
|
||||||
'Cloning %s%s to %s', redact_password_from_url(url),
|
|
||||||
rev_display, display_path(dest),
|
|
||||||
)
|
|
||||||
self.run_command(['clone', '-q', url, dest])
|
|
||||||
|
|
||||||
if rev_options.rev:
|
|
||||||
# Then a specific revision was requested.
|
|
||||||
rev_options = self.resolve_revision(dest, url, rev_options)
|
|
||||||
branch_name = getattr(rev_options, 'branch_name', None)
|
|
||||||
if branch_name is None:
|
|
||||||
# Only do a checkout if the current commit id doesn't match
|
|
||||||
# the requested revision.
|
|
||||||
if not self.is_commit_id_equal(dest, rev_options.rev):
|
|
||||||
cmd_args = ['checkout', '-q'] + rev_options.to_args()
|
|
||||||
self.run_command(cmd_args, cwd=dest)
|
|
||||||
elif self.get_current_branch(dest) != branch_name:
|
|
||||||
# Then a specific branch was requested, and that branch
|
|
||||||
# is not yet checked out.
|
|
||||||
track_branch = 'origin/{}'.format(branch_name)
|
|
||||||
cmd_args = [
|
|
||||||
'checkout', '-b', branch_name, '--track', track_branch,
|
|
||||||
]
|
|
||||||
self.run_command(cmd_args, cwd=dest)
|
|
||||||
|
|
||||||
#: repo may contain submodules
|
|
||||||
self.update_submodules(dest)
|
|
||||||
|
|
||||||
def switch(self, dest, url, rev_options):
|
|
||||||
self.run_command(['config', 'remote.origin.url', url], cwd=dest)
|
|
||||||
cmd_args = ['checkout', '-q'] + rev_options.to_args()
|
|
||||||
self.run_command(cmd_args, cwd=dest)
|
|
||||||
|
|
||||||
self.update_submodules(dest)
|
|
||||||
|
|
||||||
def update(self, dest, url, rev_options):
|
|
||||||
# First fetch changes from the default remote
|
|
||||||
if self.get_git_version() >= parse_version('1.9.0'):
|
|
||||||
# fetch tags in addition to everything else
|
|
||||||
self.run_command(['fetch', '-q', '--tags'], cwd=dest)
|
|
||||||
else:
|
|
||||||
self.run_command(['fetch', '-q'], cwd=dest)
|
|
||||||
# Then reset to wanted revision (maybe even origin/master)
|
|
||||||
rev_options = self.resolve_revision(dest, url, rev_options)
|
|
||||||
cmd_args = ['reset', '--hard', '-q'] + rev_options.to_args()
|
|
||||||
self.run_command(cmd_args, cwd=dest)
|
|
||||||
#: update submodules
|
|
||||||
self.update_submodules(dest)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_remote_url(cls, location):
|
|
||||||
"""
|
|
||||||
Return URL of the first remote encountered.
|
|
||||||
|
|
||||||
Raises RemoteNotFoundError if the repository does not have a remote
|
|
||||||
url configured.
|
|
||||||
"""
|
|
||||||
# We need to pass 1 for extra_ok_returncodes since the command
|
|
||||||
# exits with return code 1 if there are no matching lines.
|
|
||||||
stdout = cls.run_command(
|
|
||||||
['config', '--get-regexp', r'remote\..*\.url'],
|
|
||||||
extra_ok_returncodes=(1, ), show_stdout=False, cwd=location,
|
|
||||||
)
|
|
||||||
remotes = stdout.splitlines()
|
|
||||||
try:
|
|
||||||
found_remote = remotes[0]
|
|
||||||
except IndexError:
|
|
||||||
raise RemoteNotFoundError
|
|
||||||
|
|
||||||
for remote in remotes:
|
|
||||||
if remote.startswith('remote.origin.url '):
|
|
||||||
found_remote = remote
|
|
||||||
break
|
|
||||||
url = found_remote.split(' ')[1]
|
|
||||||
return url.strip()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_revision(cls, location, rev=None):
|
|
||||||
if rev is None:
|
|
||||||
rev = 'HEAD'
|
|
||||||
current_rev = cls.run_command(
|
|
||||||
['rev-parse', rev], show_stdout=False, cwd=location,
|
|
||||||
)
|
|
||||||
return current_rev.strip()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _get_subdirectory(cls, location):
|
|
||||||
"""Return the relative path of setup.py to the git repo root."""
|
|
||||||
# find the repo root
|
|
||||||
git_dir = cls.run_command(['rev-parse', '--git-dir'],
|
|
||||||
show_stdout=False, cwd=location).strip()
|
|
||||||
if not os.path.isabs(git_dir):
|
|
||||||
git_dir = os.path.join(location, git_dir)
|
|
||||||
root_dir = os.path.join(git_dir, '..')
|
|
||||||
# find setup.py
|
|
||||||
orig_location = location
|
|
||||||
while not os.path.exists(os.path.join(location, 'setup.py')):
|
|
||||||
last_location = location
|
|
||||||
location = os.path.dirname(location)
|
|
||||||
if location == last_location:
|
|
||||||
# We've traversed up to the root of the filesystem without
|
|
||||||
# finding setup.py
|
|
||||||
logger.warning(
|
|
||||||
"Could not find setup.py for directory %s (tried all "
|
|
||||||
"parent directories)",
|
|
||||||
orig_location,
|
|
||||||
)
|
|
||||||
return None
|
|
||||||
# relative path of setup.py to repo root
|
|
||||||
if samefile(root_dir, location):
|
|
||||||
return None
|
|
||||||
return os.path.relpath(location, root_dir)
|
|
||||||
|
|
||||||
    @classmethod
    def get_src_requirement(cls, location, project_name):
        repo = cls.get_remote_url(location)
        if not repo.lower().startswith('git:'):
            repo = 'git+' + repo
        current_rev = cls.get_revision(location)
        subdir = cls._get_subdirectory(location)
        req = make_vcs_requirement_url(repo, current_rev, project_name,
                                       subdir=subdir)

        return req
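A sketch of the pinned requirement this produces, assuming the 'repo@rev#egg=name&subdirectory=sub' layout of make_vcs_requirement_url; the URL and SHA below are invented for illustration.

repo = 'git+https://example.com/user/repo.git'
rev = '0123456789abcdef0123456789abcdef01234567'
req = '{}@{}#egg={}'.format(repo, rev, 'mypkg')
req += '&subdirectory={}'.format('pkg')
print(req)
# -> git+https://example.com/user/repo.git@<sha>#egg=mypkg&subdirectory=pkg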
    def get_url_rev_and_auth(self, url):
        """
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes don't
        work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
        """
        if '://' not in url:
            assert 'file:' not in url
            url = url.replace('git+', 'git+ssh://')
            url, rev, user_pass = super(Git, self).get_url_rev_and_auth(url)
            url = url.replace('ssh://', '')
        else:
            url, rev, user_pass = super(Git, self).get_url_rev_and_auth(url)

        return url, rev, user_pass
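A round-trip illustration of the ssh:// stub handling above, with a made-up URL; the comment stands in for the superclass parsing step.

url = 'git+git@example.com:user/repo.git'
assert '://' not in url
url = url.replace('git+', 'git+ssh://')  # -> 'git+ssh://git@example.com:user/repo.git'
# ... scheme-based parsing happens here ...
url = url.replace('ssh://', '')          # back to the stub form
print(url)                               # git+git@example.com:user/repo.git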
    def update_submodules(self, location):
        if not os.path.exists(os.path.join(location, '.gitmodules')):
            return
        self.run_command(
            ['submodule', 'update', '--init', '--recursive', '-q'],
            cwd=location,
        )
    @classmethod
    def controls_location(cls, location):
        if super(Git, cls).controls_location(location):
            return True
        try:
            r = cls.run_command(['rev-parse'],
                                cwd=location,
                                show_stdout=False,
                                on_returncode='ignore')
            return not r
        except BadCommand:
            logger.debug("could not determine if %s is under git control "
                         "because git is not available", location)
            return False


vcs.register(Git)
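pip's check above treats empty output from `git rev-parse` (with the return code ignored) as "under git control". A standalone Python 3 approximation by exit code instead of output, assuming `git` on PATH:

import subprocess

def is_git_controlled(location):
    # `git rev-parse` exits non-zero when the directory is not
    # inside a git work tree.
    result = subprocess.run(['git', 'rev-parse'], cwd=location,
                            capture_output=True)
    return result.returncode == 0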
@ -1,103 +0,0 @@
from __future__ import absolute_import

import logging
import os

from pip._vendor.six.moves import configparser

from pip._internal.download import path_to_url
from pip._internal.utils.misc import display_path, make_vcs_requirement_url
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.vcs import VersionControl, vcs

logger = logging.getLogger(__name__)


class Mercurial(VersionControl):
    name = 'hg'
    dirname = '.hg'
    repo_name = 'clone'
    schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http')
    def get_base_rev_args(self, rev):
        return [rev]
    def export(self, location):
        """Export the Hg repository at the url to the destination location"""
        with TempDirectory(kind="export") as temp_dir:
            self.unpack(temp_dir.path)

            self.run_command(
                ['archive', location], show_stdout=False, cwd=temp_dir.path
            )
    def fetch_new(self, dest, url, rev_options):
        rev_display = rev_options.to_display()
        logger.info(
            'Cloning hg %s%s to %s',
            url,
            rev_display,
            display_path(dest),
        )
        self.run_command(['clone', '--noupdate', '-q', url, dest])
        cmd_args = ['update', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)
    def switch(self, dest, url, rev_options):
        repo_config = os.path.join(dest, self.dirname, 'hgrc')
        config = configparser.SafeConfigParser()
        try:
            config.read(repo_config)
            config.set('paths', 'default', url)
            with open(repo_config, 'w') as config_file:
                config.write(config_file)
        except (OSError, configparser.NoSectionError) as exc:
            logger.warning(
                'Could not switch Mercurial repository to %s: %s', url, exc,
            )
        else:
            cmd_args = ['update', '-q'] + rev_options.to_args()
            self.run_command(cmd_args, cwd=dest)
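A standalone sketch of the same hgrc rewrite, using the stdlib configparser instead of the vendored six.moves one; the clone path and URL are made up, and like the original it assumes the [paths] section already exists (otherwise NoSectionError is raised).

import configparser

hgrc = '/path/to/clone/.hg/hgrc'
config = configparser.ConfigParser()
config.read(hgrc)
config.set('paths', 'default', 'https://example.com/new/repo')
with open(hgrc, 'w') as f:
    config.write(f)
# The resulting file contains:
# [paths]
# default = https://example.com/new/repo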
    def update(self, dest, url, rev_options):
        self.run_command(['pull', '-q'], cwd=dest)
        cmd_args = ['update', '-q'] + rev_options.to_args()
        self.run_command(cmd_args, cwd=dest)
    @classmethod
    def get_remote_url(cls, location):
        url = cls.run_command(
            ['showconfig', 'paths.default'],
            show_stdout=False, cwd=location).strip()
        if cls._is_local_repository(url):
            url = path_to_url(url)
        return url.strip()
    @classmethod
    def get_revision(cls, location):
        current_revision = cls.run_command(
            ['parents', '--template={rev}'],
            show_stdout=False, cwd=location).strip()
        return current_revision
    @classmethod
    def get_revision_hash(cls, location):
        current_rev_hash = cls.run_command(
            ['parents', '--template={node}'],
            show_stdout=False, cwd=location).strip()
        return current_rev_hash
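The two templates differ in portability: '{rev}' is a repository-local integer, while '{node}' is the globally stable 40-hex changeset id, which is why get_src_requirement below pins the hash. A minimal standalone sketch, assuming `hg` on PATH:

import subprocess

def hg_parent(location, template):
    # template is e.g. '{rev}' (local integer) or '{node}' (40-hex id)
    out = subprocess.check_output(
        ['hg', 'parents', '--template=' + template], cwd=location)
    return out.decode().strip()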
    @classmethod
    def get_src_requirement(cls, location, project_name):
        repo = cls.get_remote_url(location)
        if not repo.lower().startswith('hg:'):
            repo = 'hg+' + repo
        current_rev_hash = cls.get_revision_hash(location)
        return make_vcs_requirement_url(repo, current_rev_hash, project_name)
    def is_commit_id_equal(self, dest, name):
        """Always assume the versions don't match"""
        return False


vcs.register(Mercurial)
@ -1,200 +0,0 @@
from __future__ import absolute_import

import logging
import os
import re

from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    display_path, make_vcs_requirement_url, rmtree, split_auth_from_netloc,
)
from pip._internal.vcs import VersionControl, vcs

_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')


logger = logging.getLogger(__name__)


class Subversion(VersionControl):
    name = 'svn'
    dirname = '.svn'
    repo_name = 'checkout'
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
    def get_base_rev_args(self, rev):
        return ['-r', rev]
    def export(self, location):
        """Export the svn repository at the url to the destination location"""
        url, rev_options = self.get_url_rev_options(self.url)

        logger.info('Exporting svn repository %s to %s', url, location)
        with indent_log():
            if os.path.exists(location):
                # Subversion doesn't like to check out over an existing
                # directory; --force fixes this, but was only added in svn 1.5
                rmtree(location)
            cmd_args = ['export'] + rev_options.to_args() + [url, location]
            self.run_command(cmd_args, show_stdout=False)
    def fetch_new(self, dest, url, rev_options):
        rev_display = rev_options.to_display()
        logger.info(
            'Checking out %s%s to %s',
            url,
            rev_display,
            display_path(dest),
        )
        cmd_args = ['checkout', '-q'] + rev_options.to_args() + [url, dest]
        self.run_command(cmd_args)
    def switch(self, dest, url, rev_options):
        cmd_args = ['switch'] + rev_options.to_args() + [url, dest]
        self.run_command(cmd_args)
    def update(self, dest, url, rev_options):
        cmd_args = ['update'] + rev_options.to_args() + [dest]
        self.run_command(cmd_args)
    @classmethod
    def get_revision(cls, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0

        for base, dirs, files in os.walk(location):
            if cls.dirname not in dirs:
                dirs[:] = []
                continue  # no sense walking uncontrolled subdirs
            dirs.remove(cls.dirname)
            entries_fn = os.path.join(base, cls.dirname, 'entries')
            if not os.path.exists(entries_fn):
                # FIXME: should we warn?
                continue

            dirurl, localrev = cls._get_svn_url_rev(base)

            if base == location:
                base = dirurl + '/'  # save the root url
            elif not dirurl or not dirurl.startswith(base):
                dirs[:] = []
                continue  # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision
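The `dirs[:] = []` idiom above is what makes the walk efficient: assigning to the slice mutates the list os.walk iterates over, so pruned subtrees are never visited. A small self-contained demonstration:

import os

for base, dirs, files in os.walk('.'):
    if '.svn' not in dirs:
        dirs[:] = []  # stop descending below this directory
        continue
    dirs.remove('.svn')  # never walk into the metadata directory itself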
    def get_netloc_and_auth(self, netloc, scheme):
        """
        This override allows the auth information to be passed to svn via the
        --username and --password options instead of via the URL.
        """
        if scheme == 'ssh':
            # The --username and --password options can't be used for
            # svn+ssh URLs, so keep the auth information in the URL.
            return super(Subversion, self).get_netloc_and_auth(
                netloc, scheme)

        return split_auth_from_netloc(netloc)
    def get_url_rev_and_auth(self, url):
        # hotfix the URL scheme after removing 'svn+' from 'svn+ssh://': re-add it
        url, rev, user_pass = super(Subversion, self).get_url_rev_and_auth(url)
        if url.startswith('ssh://'):
            url = 'svn+' + url
        return url, rev, user_pass
    def make_rev_args(self, username, password):
        extra_args = []
        if username:
            extra_args += ['--username', username]
        if password:
            extra_args += ['--password', password]

        return extra_args
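To make the shapes it returns concrete, here is the same logic as a free function, exercised with made-up credentials:

def make_rev_args(username, password):
    extra_args = []
    if username:
        extra_args += ['--username', username]
    if password:
        extra_args += ['--password', password]
    return extra_args

assert make_rev_args('alice', 's3cret') == ['--username', 'alice',
                                            '--password', 's3cret']
assert make_rev_args('alice', None) == ['--username', 'alice']
assert make_rev_args(None, None) == []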
    @classmethod
    def get_remote_url(cls, location):
        # In cases where the source is in a subdirectory, not alongside
        # setup.py, we have to walk up from the location until we find a
        # real setup.py.
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding setup.py
                logger.warning(
                    "Could not find setup.py for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                return None

        return cls._get_svn_url_rev(location)[0]
    @classmethod
    def _get_svn_url_rev(cls, location):
        from pip._internal.exceptions import InstallationError

        entries_path = os.path.join(location, cls.dirname, 'entries')
        if os.path.exists(entries_path):
            with open(entries_path) as f:
                data = f.read()
        else:  # subversion >= 1.7 does not have the 'entries' file
            data = ''

        if (data.startswith('8') or
                data.startswith('9') or
                data.startswith('10')):
            data = list(map(str.splitlines, data.split('\n\x0c\n')))
            del data[0][0]  # get rid of the '8'
            url = data[0][3]
            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
        elif data.startswith('<?xml'):
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            url = match.group(1)  # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                xml = cls.run_command(
                    ['info', '--xml', location],
                    show_stdout=False,
                )
                url = _svn_info_xml_url_re.search(xml).group(1)
                revs = [
                    int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
                ]
            except InstallationError:
                url, revs = None, []

        if revs:
            rev = max(revs)
        else:
            rev = 0

        return url, rev
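A hedged illustration of the `svn info --xml` fallback above, run against a snippet of the kind of output that command produces (the URL and revision are invented); the regexes mirror the module-level ones defined earlier.

import re

_url_re = re.compile(r'<url>(.*)</url>')
_rev_re = re.compile(r'\s*revision="(\d+)"')

xml = '''<?xml version="1.0"?>
<info>
<entry kind="dir" path="." revision="1012">
<url>https://svn.example.com/repo/trunk</url>
</entry>
</info>'''

print(_url_re.search(xml).group(1))                         # the checkout URL
print(max(int(m.group(1)) for m in _rev_re.finditer(xml)))  # highest revision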
    @classmethod
    def get_src_requirement(cls, location, project_name):
        repo = cls.get_remote_url(location)
        if repo is None:
            return None
        repo = 'svn+' + repo
        rev = cls.get_revision(location)
        return make_vcs_requirement_url(repo, rev, project_name)
    def is_commit_id_equal(self, dest, name):
        """Always assume the versions don't match"""
        return False


vcs.register(Subversion)