Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/bool.pxd +37 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/buffer.pxd +112 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/cobject.pxd +36 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/codecs.pxd +121 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/fileobject.pxd +57 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/mapping.pxd +63 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/ref.pxd +50 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/set.pxd +119 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/slice.pxd +70 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/weakref.pxd +42 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/__pycache__/nx_pylab.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/nx_pydot.py +454 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__init__.py +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__pycache__/test_latex.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__pycache__/test_layout.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__pycache__/test_pydot.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/__pycache__/test_convert_numpy.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/__pycache__/test_convert_pandas.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/__pycache__/misc.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/__pycache__/random_sequence.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/backends.py +975 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/mapped_queue.py +298 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/tests/test_decorators.py +491 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/tests/test_heaps.py +131 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/tests/test_random_sequence.py +38 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/tests/test_unionfind.py +55 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/cmdoptions.py +1075 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/main.py +80 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/main_parser.py +134 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/status_codes.py +6 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/base.py +688 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__init__.py +6 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_compat.py +85 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_envs.py +189 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-311.pyc +0 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/network/auth.py +566 -0
- tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/network/cache.py +106 -0
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/bool.pxd
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
cdef extern from "Python.h":
|
| 3 |
+
|
| 4 |
+
############################################################################
|
| 5 |
+
# 7.2.2 Boolean Objects
|
| 6 |
+
############################################################################
|
| 7 |
+
|
| 8 |
+
ctypedef class __builtin__.bool [object PyBoolObject]:
|
| 9 |
+
pass
|
| 10 |
+
|
| 11 |
+
# Booleans in Python are implemented as a subclass of
|
| 12 |
+
# integers. There are only two booleans, Py_False and Py_True. As
|
| 13 |
+
# such, the normal creation and deletion functions don't apply to
|
| 14 |
+
# booleans. The following macros are available, however.
|
| 15 |
+
|
| 16 |
+
bint PyBool_Check(object o)
|
| 17 |
+
# Return true if o is of type PyBool_Type.
|
| 18 |
+
|
| 19 |
+
#PyObject* Py_False
|
| 20 |
+
# The Python False object. This object has no methods. It needs to
|
| 21 |
+
# be treated just like any other object with respect to reference
|
| 22 |
+
# counts.
|
| 23 |
+
|
| 24 |
+
#PyObject* Py_True
|
| 25 |
+
# The Python True object. This object has no methods. It needs to
|
| 26 |
+
# be treated just like any other object with respect to reference
|
| 27 |
+
# counts.
|
| 28 |
+
|
| 29 |
+
# Py_RETURN_FALSE
|
| 30 |
+
# Return Py_False from a function, properly incrementing its reference count.
|
| 31 |
+
|
| 32 |
+
# Py_RETURN_TRUE
|
| 33 |
+
# Return Py_True from a function, properly incrementing its reference count.
|
| 34 |
+
|
| 35 |
+
object PyBool_FromLong(long v)
|
| 36 |
+
# Return value: New reference.
|
| 37 |
+
# Return a new reference to Py_True or Py_False depending on the truth value of v.
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/buffer.pxd
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Please see the Python header files (object.h/abstract.h) for docs
|
| 2 |
+
|
| 3 |
+
cdef extern from "Python.h":
|
| 4 |
+
|
| 5 |
+
cdef enum:
|
| 6 |
+
PyBUF_MAX_NDIM
|
| 7 |
+
|
| 8 |
+
cdef enum:
|
| 9 |
+
PyBUF_SIMPLE,
|
| 10 |
+
PyBUF_WRITABLE,
|
| 11 |
+
PyBUF_WRITEABLE, # backwards compatibility
|
| 12 |
+
PyBUF_FORMAT,
|
| 13 |
+
PyBUF_ND,
|
| 14 |
+
PyBUF_STRIDES,
|
| 15 |
+
PyBUF_C_CONTIGUOUS,
|
| 16 |
+
PyBUF_F_CONTIGUOUS,
|
| 17 |
+
PyBUF_ANY_CONTIGUOUS,
|
| 18 |
+
PyBUF_INDIRECT,
|
| 19 |
+
PyBUF_CONTIG,
|
| 20 |
+
PyBUF_CONTIG_RO,
|
| 21 |
+
PyBUF_STRIDED,
|
| 22 |
+
PyBUF_STRIDED_RO,
|
| 23 |
+
PyBUF_RECORDS,
|
| 24 |
+
PyBUF_RECORDS_RO,
|
| 25 |
+
PyBUF_FULL,
|
| 26 |
+
PyBUF_FULL_RO,
|
| 27 |
+
PyBUF_READ,
|
| 28 |
+
PyBUF_WRITE,
|
| 29 |
+
PyBUF_SHADOW
|
| 30 |
+
|
| 31 |
+
bint PyObject_CheckBuffer(object obj)
|
| 32 |
+
# Return 1 if obj supports the buffer interface otherwise 0.
|
| 33 |
+
|
| 34 |
+
int PyObject_GetBuffer(object obj, Py_buffer *view, int flags) except -1
|
| 35 |
+
# Export obj into a Py_buffer, view. These arguments must never be
|
| 36 |
+
# NULL. The flags argument is a bit field indicating what kind of
|
| 37 |
+
# buffer the caller is prepared to deal with and therefore what
|
| 38 |
+
# kind of buffer the exporter is allowed to return. The buffer
|
| 39 |
+
# interface allows for complicated memory sharing possibilities,
|
| 40 |
+
# but some caller may not be able to handle all the complexity but
|
| 41 |
+
# may want to see if the exporter will let them take a simpler
|
| 42 |
+
# view to its memory.
|
| 43 |
+
|
| 44 |
+
# Some exporters may not be able to share memory in every possible
|
| 45 |
+
# way and may need to raise errors to signal to some consumers
|
| 46 |
+
# that something is just not possible. These errors should be a
|
| 47 |
+
# BufferError unless there is another error that is actually
|
| 48 |
+
# causing the problem. The exporter can use flags information to
|
| 49 |
+
# simplify how much of the Py_buffer structure is filled in with
|
| 50 |
+
# non-default values and/or raise an error if the object can’t
|
| 51 |
+
# support a simpler view of its memory.
|
| 52 |
+
|
| 53 |
+
# 0 is returned on success and -1 on error.
|
| 54 |
+
|
| 55 |
+
void PyBuffer_Release(Py_buffer *view)
|
| 56 |
+
# Release the buffer view. This should be called when the buffer
|
| 57 |
+
# is no longer being used as it may free memory from it.
|
| 58 |
+
|
| 59 |
+
void* PyBuffer_GetPointer(Py_buffer *view, Py_ssize_t *indices)
|
| 60 |
+
# ??
|
| 61 |
+
|
| 62 |
+
Py_ssize_t PyBuffer_SizeFromFormat(char *) # actually const char
|
| 63 |
+
# Return the implied ~Py_buffer.itemsize from the struct-stype
|
| 64 |
+
# ~Py_buffer.format
|
| 65 |
+
|
| 66 |
+
int PyBuffer_ToContiguous(void *buf, Py_buffer *view, Py_ssize_t len, char fort)
|
| 67 |
+
# ??
|
| 68 |
+
|
| 69 |
+
int PyBuffer_FromContiguous(Py_buffer *view, void *buf, Py_ssize_t len, char fort)
|
| 70 |
+
# ??
|
| 71 |
+
|
| 72 |
+
int PyObject_CopyToObject(object obj, void *buf, Py_ssize_t len, char fortran) except -1
|
| 73 |
+
# Copy len bytes of data pointed to by the contiguous chunk of
|
| 74 |
+
# memory pointed to by buf into the buffer exported by obj. The
|
| 75 |
+
# buffer must of course be writable. Return 0 on success and
|
| 76 |
+
# return -1 and raise an error on failure. If the object does not
|
| 77 |
+
# have a writable buffer, then an error is raised. If fortran is
|
| 78 |
+
# 'F', then if the object is multi-dimensional, then the data will
|
| 79 |
+
# be copied into the array in Fortran-style (first dimension
|
| 80 |
+
# varies the fastest). If fortran is 'C', then the data will be
|
| 81 |
+
# copied into the array in C-style (last dimension varies the
|
| 82 |
+
# fastest). If fortran is 'A', then it does not matter and the
|
| 83 |
+
# copy will be made in whatever way is more efficient.
|
| 84 |
+
|
| 85 |
+
int PyObject_CopyData(object dest, object src) except -1
|
| 86 |
+
# Copy the data from the src buffer to the buffer of destination
|
| 87 |
+
|
| 88 |
+
bint PyBuffer_IsContiguous(Py_buffer *view, char fort)
|
| 89 |
+
# Return 1 if the memory defined by the view is C-style (fortran
|
| 90 |
+
# is 'C') or Fortran-style (fortran is 'F') contiguous or either
|
| 91 |
+
# one (fortran is 'A'). Return 0 otherwise.
|
| 92 |
+
|
| 93 |
+
void PyBuffer_FillContiguousStrides(int ndims,
|
| 94 |
+
Py_ssize_t *shape,
|
| 95 |
+
Py_ssize_t *strides,
|
| 96 |
+
Py_ssize_t itemsize,
|
| 97 |
+
char fort)
|
| 98 |
+
# Fill the strides array with byte-strides of a contiguous
|
| 99 |
+
# (Fortran-style if fort is 'F' or C-style otherwise) array of the
|
| 100 |
+
# given shape with the given number of bytes per element.
|
| 101 |
+
|
| 102 |
+
int PyBuffer_FillInfo(Py_buffer *view, object exporter, void *buf,
|
| 103 |
+
Py_ssize_t len, int readonly, int flags) except -1
|
| 104 |
+
# Fill in a buffer-info structure, view, correctly for an exporter
|
| 105 |
+
# that can only share a contiguous chunk of memory of “unsigned
|
| 106 |
+
# bytes” of the given length. Return 0 on success and -1 (with
|
| 107 |
+
# raising an error) on error.
|
| 108 |
+
|
| 109 |
+
# DEPRECATED HERE: do not cimport from here, cimport from cpython.object instead
|
| 110 |
+
object PyObject_Format(object obj, object format_spec)
|
| 111 |
+
# Takes an arbitrary object and returns the result of calling
|
| 112 |
+
# obj.__format__(format_spec).
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/cobject.pxd
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
cdef extern from "Python.h":
|
| 3 |
+
|
| 4 |
+
###########################################################################
|
| 5 |
+
# Warning:
|
| 6 |
+
#
|
| 7 |
+
# The CObject API is deprecated as of Python 3.1. Please switch to
|
| 8 |
+
# the new Capsules API.
|
| 9 |
+
###########################################################################
|
| 10 |
+
|
| 11 |
+
int PyCObject_Check(object p)
|
| 12 |
+
# Return true if its argument is a PyCObject.
|
| 13 |
+
|
| 14 |
+
object PyCObject_FromVoidPtr(void* cobj, void (*destr)(void *))
|
| 15 |
+
# Return value: New reference.
|
| 16 |
+
#
|
| 17 |
+
# Create a PyCObject from the void * cobj. The destr function will
|
| 18 |
+
# be called when the object is reclaimed, unless it is NULL.
|
| 19 |
+
|
| 20 |
+
object PyCObject_FromVoidPtrAndDesc(void* cobj, void* desc, void (*destr)(void *, void *))
|
| 21 |
+
# Return value: New reference.
|
| 22 |
+
#
|
| 23 |
+
# Create a PyCObject from the void * cobj. The destr function will
|
| 24 |
+
# be called when the object is reclaimed. The desc argument can be
|
| 25 |
+
# used to pass extra callback data for the destructor function.
|
| 26 |
+
|
| 27 |
+
void* PyCObject_AsVoidPtr(object self) except? NULL
|
| 28 |
+
# Return the object void * that the PyCObject self was created with.
|
| 29 |
+
|
| 30 |
+
void* PyCObject_GetDesc(object self) except? NULL
|
| 31 |
+
# Return the description void * that the PyCObject self was created with.
|
| 32 |
+
|
| 33 |
+
int PyCObject_SetVoidPtr(object self, void* cobj) except 0
|
| 34 |
+
# Set the void pointer inside self to cobj. The PyCObject must not
|
| 35 |
+
# have an associated destructor. Return true on success, false on
|
| 36 |
+
# failure.
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/codecs.pxd
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cdef extern from "Python.h":
|
| 2 |
+
|
| 3 |
+
###########################################################################
|
| 4 |
+
# Codec registry and support functions
|
| 5 |
+
###########################################################################
|
| 6 |
+
|
| 7 |
+
int PyCodec_Register(object search_function)
|
| 8 |
+
# Register a new codec search function.
|
| 9 |
+
|
| 10 |
+
# As side effect, this tries to load the encodings package, if not yet
|
| 11 |
+
# done, to make sure that it is always first in the list of search
|
| 12 |
+
# functions.
|
| 13 |
+
|
| 14 |
+
int PyCodec_KnownEncoding(const char *encoding)
|
| 15 |
+
# Return 1 or 0 depending on whether there is a registered codec for the
|
| 16 |
+
# given encoding. This function always succeeds.
|
| 17 |
+
|
| 18 |
+
object PyCodec_Encode(object o, const char *encoding, const char *errors)
|
| 19 |
+
# Return value: New reference.
|
| 20 |
+
# Generic codec based encoding API.
|
| 21 |
+
|
| 22 |
+
# o is passed through the encoder function found for the given encoding
|
| 23 |
+
# using the error handling method defined by errors. errors may be NULL
|
| 24 |
+
# to use the default method defined for the codec. Raises a LookupError
|
| 25 |
+
# if no encoder can be found.
|
| 26 |
+
|
| 27 |
+
object PyCodec_Decode(object o, const char *encoding, const char *errors)
|
| 28 |
+
# Return value: New reference.
|
| 29 |
+
# Generic codec based decoding API.
|
| 30 |
+
|
| 31 |
+
# o is passed through the decoder function found for the given encoding
|
| 32 |
+
# using the error handling method defined by errors. errors may be NULL
|
| 33 |
+
# to use the default method defined for the codec. Raises a LookupError
|
| 34 |
+
# if no encoder can be found.
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
# Codec lookup API
|
| 38 |
+
|
| 39 |
+
# In the following functions, the encoding string is looked up converted
|
| 40 |
+
# to all lower-case characters, which makes encodings looked up through
|
| 41 |
+
# this mechanism effectively case-insensitive. If no codec is found, a
|
| 42 |
+
# KeyError is set and NULL returned.
|
| 43 |
+
|
| 44 |
+
object PyCodec_Encoder(const char *encoding)
|
| 45 |
+
# Return value: New reference.
|
| 46 |
+
# Get an encoder function for the given encoding.
|
| 47 |
+
|
| 48 |
+
object PyCodec_Decoder(const char *encoding)
|
| 49 |
+
# Return value: New reference.
|
| 50 |
+
# Get a decoder function for the given encoding.
|
| 51 |
+
|
| 52 |
+
object PyCodec_IncrementalEncoder(const char *encoding, const char *errors)
|
| 53 |
+
# Return value: New reference.
|
| 54 |
+
# Get an IncrementalEncoder object for the given encoding.
|
| 55 |
+
|
| 56 |
+
object PyCodec_IncrementalDecoder(const char *encoding, const char *errors)
|
| 57 |
+
# Return value: New reference.
|
| 58 |
+
# Get an IncrementalDecoder object for the given encoding.
|
| 59 |
+
|
| 60 |
+
object PyCodec_StreamReader(const char *encoding, object stream, const char *errors)
|
| 61 |
+
# Return value: New reference.
|
| 62 |
+
# Get a StreamReader factory function for the given encoding.
|
| 63 |
+
|
| 64 |
+
object PyCodec_StreamWriter(const char *encoding, object stream, const char *errors)
|
| 65 |
+
# Return value: New reference.
|
| 66 |
+
# Get a StreamWriter factory function for the given encoding.
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
# Registry API for Unicode encoding error handlers
|
| 70 |
+
|
| 71 |
+
int PyCodec_RegisterError(const char *name, object error) except? -1
|
| 72 |
+
# Register the error handling callback function error under the given
|
| 73 |
+
# name. This callback function will be called by a codec when it
|
| 74 |
+
# encounters unencodable characters/undecodable bytes and name is
|
| 75 |
+
# specified as the error parameter in the call to the encode/decode
|
| 76 |
+
# function.
|
| 77 |
+
|
| 78 |
+
# The callback gets a single argument, an instance of
|
| 79 |
+
# UnicodeEncodeError, UnicodeDecodeError or UnicodeTranslateError that
|
| 80 |
+
# holds information about the problematic sequence of characters or bytes
|
| 81 |
+
# and their offset in the original string (see Unicode Exception Objects
|
| 82 |
+
# for functions to extract this information). The callback must either
|
| 83 |
+
# raise the given exception, or return a two-item tuple containing the
|
| 84 |
+
# replacement for the problematic sequence, and an integer giving the
|
| 85 |
+
# offset in the original string at which encoding/decoding should be
|
| 86 |
+
# resumed.
|
| 87 |
+
|
| 88 |
+
# Return 0 on success, -1 on error.
|
| 89 |
+
|
| 90 |
+
object PyCodec_LookupError(const char *name)
|
| 91 |
+
# Return value: New reference.
|
| 92 |
+
# Lookup the error handling callback function registered under name. As a
|
| 93 |
+
# special case NULL can be passed, in which case the error handling
|
| 94 |
+
# callback for "strict" will be returned.
|
| 95 |
+
|
| 96 |
+
object PyCodec_StrictErrors(object exc)
|
| 97 |
+
# Return value: Always NULL.
|
| 98 |
+
# Raise exc as an exception.
|
| 99 |
+
|
| 100 |
+
object PyCodec_IgnoreErrors(object exc)
|
| 101 |
+
# Return value: New reference.
|
| 102 |
+
# Ignore the unicode error, skipping the faulty input.
|
| 103 |
+
|
| 104 |
+
object PyCodec_ReplaceErrors(object exc)
|
| 105 |
+
# Return value: New reference.
|
| 106 |
+
# Replace the unicode encode error with "?" or "U+FFFD".
|
| 107 |
+
|
| 108 |
+
object PyCodec_XMLCharRefReplaceErrors(object exc)
|
| 109 |
+
# Return value: New reference.
|
| 110 |
+
# Replace the unicode encode error with XML character references.
|
| 111 |
+
|
| 112 |
+
object PyCodec_BackslashReplaceErrors(object exc)
|
| 113 |
+
# Return value: New reference.
|
| 114 |
+
# Replace the unicode encode error with backslash escapes ("\x", "\u"
|
| 115 |
+
# and "\U").
|
| 116 |
+
|
| 117 |
+
object PyCodec_NameReplaceErrors(object exc)
|
| 118 |
+
# Return value: New reference.
|
| 119 |
+
# Replace the unicode encode error with "\N{...}" escapes.
|
| 120 |
+
|
| 121 |
+
# New in version 3.5.
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/fileobject.pxd
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
From https://docs.python.org/3.9/c-api/file.html
|
| 3 |
+
|
| 4 |
+
These APIs are a minimal emulation of the Python 2 C API for built-in file objects,
|
| 5 |
+
which used to rely on the buffered I/O (FILE*) support from the C standard library.
|
| 6 |
+
In Python 3, files and streams use the new io module, which defines several layers
|
| 7 |
+
over the low-level unbuffered I/O of the operating system. The functions described
|
| 8 |
+
below are convenience C wrappers over these new APIs, and meant mostly for internal
|
| 9 |
+
error reporting in the interpreter;
|
| 10 |
+
|
| 11 |
+
third-party code is advised to access the io APIs instead.
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
cdef extern from "Python.h":
|
| 15 |
+
|
| 16 |
+
###########################################################################
|
| 17 |
+
# File Objects
|
| 18 |
+
###########################################################################
|
| 19 |
+
|
| 20 |
+
object PyFile_FromFd(int fd, const char *name, const char *mode, int buffering,
|
| 21 |
+
const char *encoding, const char *errors, const char *newline, int closefd)
|
| 22 |
+
# Return value: New reference.
|
| 23 |
+
# Create a Python file object from the file descriptor of an already
|
| 24 |
+
# opened file fd. The arguments name, encoding, errors and newline can be
|
| 25 |
+
# NULL to use the defaults; buffering can be -1 to use the default. name
|
| 26 |
+
# is ignored and kept for backward compatibility. Return NULL on failure.
|
| 27 |
+
# For a more comprehensive description of the arguments, please refer to
|
| 28 |
+
# the io.open() function documentation.
|
| 29 |
+
|
| 30 |
+
# Warning: Since Python streams have their own buffering layer, mixing
|
| 31 |
+
# them with OS-level file descriptors can produce various issues (such as
|
| 32 |
+
# unexpected ordering of data).
|
| 33 |
+
|
| 34 |
+
# Changed in version 3.2: Ignore name attribute.
|
| 35 |
+
|
| 36 |
+
object PyFile_GetLine(object p, int n)
|
| 37 |
+
# Return value: New reference.
|
| 38 |
+
# Equivalent to p.readline([n]), this function reads one line from the
|
| 39 |
+
# object p. p may be a file object or any object with a readline()
|
| 40 |
+
# method. If n is 0, exactly one line is read, regardless of the length of
|
| 41 |
+
# the line. If n is greater than 0, no more than n bytes will be read from
|
| 42 |
+
# the file; a partial line can be returned. In both cases, an empty string
|
| 43 |
+
# is returned if the end of the file is reached immediately. If n is less
|
| 44 |
+
# than 0, however, one line is read regardless of length, but EOFError is
|
| 45 |
+
# raised if the end of the file is reached immediately.
|
| 46 |
+
|
| 47 |
+
int PyFile_WriteObject(object obj, object p, int flags) except? -1
|
| 48 |
+
# Write object obj to file object p. The only supported flag for flags
|
| 49 |
+
# is Py_PRINT_RAW; if given, the str() of the object is written instead of
|
| 50 |
+
# the repr(). Return 0 on success or -1 on failure; the appropriate
|
| 51 |
+
# exception will be set.
|
| 52 |
+
|
| 53 |
+
int PyFile_WriteString(const char *s, object p) except? -1
|
| 54 |
+
# Write string s to file object p. Return 0 on success or -1 on failure;
|
| 55 |
+
# the appropriate exception will be set.
|
| 56 |
+
|
| 57 |
+
enum: Py_PRINT_RAW
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/mapping.pxd
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cdef extern from "Python.h":
|
| 2 |
+
|
| 3 |
+
############################################################################
|
| 4 |
+
# 6.4 Mapping Protocol
|
| 5 |
+
############################################################################
|
| 6 |
+
|
| 7 |
+
bint PyMapping_Check(object o)
|
| 8 |
+
# Return 1 if the object provides mapping protocol, and 0
|
| 9 |
+
# otherwise. This function always succeeds.
|
| 10 |
+
|
| 11 |
+
Py_ssize_t PyMapping_Length(object o) except -1
|
| 12 |
+
# Returns the number of keys in object o on success, and -1 on
|
| 13 |
+
# failure. For objects that do not provide mapping protocol, this
|
| 14 |
+
# is equivalent to the Python expression "len(o)".
|
| 15 |
+
|
| 16 |
+
int PyMapping_DelItemString(object o, char *key) except -1
|
| 17 |
+
# Remove the mapping for object key from the object o. Return -1
|
| 18 |
+
# on failure. This is equivalent to the Python statement "del
|
| 19 |
+
# o[key]".
|
| 20 |
+
|
| 21 |
+
int PyMapping_DelItem(object o, object key) except -1
|
| 22 |
+
# Remove the mapping for object key from the object o. Return -1
|
| 23 |
+
# on failure. This is equivalent to the Python statement "del
|
| 24 |
+
# o[key]".
|
| 25 |
+
|
| 26 |
+
bint PyMapping_HasKeyString(object o, char *key)
|
| 27 |
+
# On success, return 1 if the mapping object has the key key and 0
|
| 28 |
+
# otherwise. This is equivalent to the Python expression
|
| 29 |
+
# "o.has_key(key)". This function always succeeds.
|
| 30 |
+
|
| 31 |
+
bint PyMapping_HasKey(object o, object key)
|
| 32 |
+
# Return 1 if the mapping object has the key key and 0
|
| 33 |
+
# otherwise. This is equivalent to the Python expression
|
| 34 |
+
# "o.has_key(key)". This function always succeeds.
|
| 35 |
+
|
| 36 |
+
object PyMapping_Keys(object o)
|
| 37 |
+
# Return value: New reference.
|
| 38 |
+
# On success, return a list of the keys in object o. On failure,
|
| 39 |
+
# return NULL. This is equivalent to the Python expression
|
| 40 |
+
# "o.keys()".
|
| 41 |
+
|
| 42 |
+
object PyMapping_Values(object o)
|
| 43 |
+
# Return value: New reference.
|
| 44 |
+
# On success, return a list of the values in object o. On failure,
|
| 45 |
+
# return NULL. This is equivalent to the Python expression
|
| 46 |
+
# "o.values()".
|
| 47 |
+
|
| 48 |
+
object PyMapping_Items(object o)
|
| 49 |
+
# Return value: New reference.
|
| 50 |
+
# On success, return a list of the items in object o, where each
|
| 51 |
+
# item is a tuple containing a key-value pair. On failure, return
|
| 52 |
+
# NULL. This is equivalent to the Python expression "o.items()".
|
| 53 |
+
|
| 54 |
+
object PyMapping_GetItemString(object o, char *key)
|
| 55 |
+
# Return value: New reference.
|
| 56 |
+
# Return element of o corresponding to the object key or NULL on
|
| 57 |
+
# failure. This is the equivalent of the Python expression
|
| 58 |
+
# "o[key]".
|
| 59 |
+
|
| 60 |
+
int PyMapping_SetItemString(object o, char *key, object v) except -1
|
| 61 |
+
# Map the object key to the value v in object o. Returns -1 on
|
| 62 |
+
# failure. This is the equivalent of the Python statement "o[key]
|
| 63 |
+
# = v".
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/ref.pxd
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .object cimport PyObject, PyTypeObject, Py_TYPE # legacy imports for re-export
|
| 2 |
+
|
| 3 |
+
cdef extern from "Python.h":
|
| 4 |
+
#####################################################################
|
| 5 |
+
# 3. Reference Counts
|
| 6 |
+
#####################################################################
|
| 7 |
+
# The macros in this section are used for managing reference counts of Python objects.
|
| 8 |
+
void Py_INCREF(object o)
|
| 9 |
+
# Increment the reference count for object o. The object must not
|
| 10 |
+
# be NULL; if you aren't sure that it isn't NULL, use
|
| 11 |
+
# Py_XINCREF().
|
| 12 |
+
|
| 13 |
+
void Py_XINCREF(PyObject* o)
|
| 14 |
+
# Increment the reference count for object o. The object may be NULL, in which case the macro has no effect.
|
| 15 |
+
|
| 16 |
+
void Py_DECREF(object o)
|
| 17 |
+
# Decrement the reference count for object o. The object must not
|
| 18 |
+
# be NULL; if you aren't sure that it isn't NULL, use
|
| 19 |
+
# Py_XDECREF(). If the reference count reaches zero, the object's
|
| 20 |
+
# type's deallocation function (which must not be NULL) is
|
| 21 |
+
# invoked.
|
| 22 |
+
|
| 23 |
+
# Warning: The deallocation function can cause arbitrary Python
|
| 24 |
+
# code to be invoked (e.g. when a class instance with a __del__()
|
| 25 |
+
# method is deallocated). While exceptions in such code are not
|
| 26 |
+
# propagated, the executed code has free access to all Python
|
| 27 |
+
# global variables. This means that any object that is reachable
|
| 28 |
+
# from a global variable should be in a consistent state before
|
| 29 |
+
# Py_DECREF() is invoked. For example, code to delete an object
|
| 30 |
+
# from a list should copy a reference to the deleted object in a
|
| 31 |
+
# temporary variable, update the list data structure, and then
|
| 32 |
+
# call Py_DECREF() for the temporary variable.
|
| 33 |
+
|
| 34 |
+
void Py_XDECREF(PyObject* o)
|
| 35 |
+
# Decrement the reference count for object o. The object may be
|
| 36 |
+
# NULL, in which case the macro has no effect; otherwise the
|
| 37 |
+
# effect is the same as for Py_DECREF(), and the same warning
|
| 38 |
+
# applies.
|
| 39 |
+
|
| 40 |
+
void Py_CLEAR(PyObject* o)
|
| 41 |
+
# Decrement the reference count for object o. The object may be
|
| 42 |
+
# NULL, in which case the macro has no effect; otherwise the
|
| 43 |
+
# effect is the same as for Py_DECREF(), except that the argument
|
| 44 |
+
# is also set to NULL. The warning for Py_DECREF() does not apply
|
| 45 |
+
# with respect to the object passed because the macro carefully
|
| 46 |
+
# uses a temporary variable and sets the argument to NULL before
|
| 47 |
+
# decrementing its reference count.
|
| 48 |
+
# It is a good idea to use this macro whenever decrementing the
|
| 49 |
+
# value of a variable that might be traversed during garbage
|
| 50 |
+
# collection.
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/set.pxd
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cdef extern from "Python.h":
|
| 2 |
+
|
| 3 |
+
############################################################################
|
| 4 |
+
# 7.5.14 Set Objects
|
| 5 |
+
############################################################################
|
| 6 |
+
|
| 7 |
+
# This section details the public API for set and frozenset
|
| 8 |
+
# objects. Any functionality not listed below is best accessed
|
| 9 |
+
# using the either the abstract object protocol (including
|
| 10 |
+
# PyObject_CallMethod(), PyObject_RichCompareBool(),
|
| 11 |
+
# PyObject_Hash(), PyObject_Repr(), PyObject_IsTrue(),
|
| 12 |
+
# PyObject_Print(), and PyObject_GetIter()) or the abstract number
|
| 13 |
+
# protocol (including PyNumber_Add(), PyNumber_Subtract(),
|
| 14 |
+
# PyNumber_Or(), PyNumber_Xor(), PyNumber_InPlaceAdd(),
|
| 15 |
+
# PyNumber_InPlaceSubtract(), PyNumber_InPlaceOr(), and
|
| 16 |
+
# PyNumber_InPlaceXor()).
|
| 17 |
+
|
| 18 |
+
# PySetObject
|
| 19 |
+
# This subtype of PyObject is used to hold the internal data for
|
| 20 |
+
# both set and frozenset objects. It is like a PyDictObject in
|
| 21 |
+
# that it is a fixed size for small sets (much like tuple storage)
|
| 22 |
+
# and will point to a separate, variable sized block of memory for
|
| 23 |
+
# medium and large sized sets (much like list storage). None of
|
| 24 |
+
# the fields of this structure should be considered public and are
|
| 25 |
+
# subject to change. All access should be done through the
|
| 26 |
+
# documented API rather than by manipulating the values in the
|
| 27 |
+
# structure.
|
| 28 |
+
|
| 29 |
+
# PyTypeObject PySet_Type
|
| 30 |
+
# This is an instance of PyTypeObject representing the Python set type.
|
| 31 |
+
|
| 32 |
+
# PyTypeObject PyFrozenSet_Type
|
| 33 |
+
# This is an instance of PyTypeObject representing the Python frozenset type.
|
| 34 |
+
|
| 35 |
+
# The following type check macros work on pointers to any Python
|
| 36 |
+
# object. Likewise, the constructor functions work with any
|
| 37 |
+
# iterable Python object.
|
| 38 |
+
|
| 39 |
+
bint PyAnySet_Check(object p)
|
| 40 |
+
# Return true if p is a set object, a frozenset object, or an
|
| 41 |
+
# instance of a subtype.
|
| 42 |
+
|
| 43 |
+
bint PyAnySet_CheckExact(object p)
|
| 44 |
+
# Return true if p is a set object or a frozenset object but not
|
| 45 |
+
# an instance of a subtype.
|
| 46 |
+
|
| 47 |
+
bint PyFrozenSet_Check(object p)
|
| 48 |
+
# Return true if p is a frozenset object or an instance of a subtype.
|
| 49 |
+
|
| 50 |
+
bint PyFrozenSet_CheckExact(object p)
|
| 51 |
+
# Return true if p is a frozenset object but not an instance of a subtype.
|
| 52 |
+
|
| 53 |
+
bint PySet_Check(object p)
|
| 54 |
+
# Return true if p is a set object or an instance of a subtype.
|
| 55 |
+
|
| 56 |
+
object PySet_New(object iterable)
|
| 57 |
+
# Return value: New reference.
|
| 58 |
+
# Return a new set containing objects returned by the
|
| 59 |
+
# iterable. The iterable may be NULL to create a new empty
|
| 60 |
+
# set. Return the new set on success or NULL on failure. Raise
|
| 61 |
+
# TypeError if iterable is not actually iterable. The constructor
|
| 62 |
+
# is also useful for copying a set (c=set(s)).
|
| 63 |
+
|
| 64 |
+
object PyFrozenSet_New(object iterable)
|
| 65 |
+
# Return value: New reference.
|
| 66 |
+
# Return a new frozenset containing objects returned by the
|
| 67 |
+
# iterable. The iterable may be NULL to create a new empty
|
| 68 |
+
# frozenset. Return the new set on success or NULL on
|
| 69 |
+
# failure. Raise TypeError if iterable is not actually iterable.
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
# The following functions and macros are available for instances
|
| 73 |
+
# of set or frozenset or instances of their subtypes.
|
| 74 |
+
|
| 75 |
+
Py_ssize_t PySet_Size(object anyset) except -1
|
| 76 |
+
# Return the length of a set or frozenset object. Equivalent to
|
| 77 |
+
# "len(anyset)". Raises a PyExc_SystemError if anyset is not a
|
| 78 |
+
# set, frozenset, or an instance of a subtype.
|
| 79 |
+
|
| 80 |
+
Py_ssize_t PySet_GET_SIZE(object anyset)
|
| 81 |
+
# Macro form of PySet_Size() without error checking.
|
| 82 |
+
|
| 83 |
+
bint PySet_Contains(object anyset, object key) except -1
|
| 84 |
+
# Return 1 if found, 0 if not found, and -1 if an error is
|
| 85 |
+
# encountered. Unlike the Python __contains__() method, this
|
| 86 |
+
# function does not automatically convert unhashable sets into
|
| 87 |
+
# temporary frozensets. Raise a TypeError if the key is
|
| 88 |
+
# unhashable. Raise PyExc_SystemError if anyset is not a set,
|
| 89 |
+
# frozenset, or an instance of a subtype.
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
# The following functions are available for instances of set or
|
| 93 |
+
# its subtypes but not for instances of frozenset or its subtypes.
|
| 94 |
+
|
| 95 |
+
int PySet_Add(object set, object key) except -1
|
| 96 |
+
# Add key to a set instance. Does not apply to frozenset
|
| 97 |
+
# instances. Return 0 on success or -1 on failure. Raise a
|
| 98 |
+
# TypeError if the key is unhashable. Raise a MemoryError if there
|
| 99 |
+
# is no room to grow. Raise a SystemError if set is an not an
|
| 100 |
+
# instance of set or its subtype.
|
| 101 |
+
|
| 102 |
+
bint PySet_Discard(object set, object key) except -1
|
| 103 |
+
# Return 1 if found and removed, 0 if not found (no action taken),
|
| 104 |
+
# and -1 if an error is encountered. Does not raise KeyError for
|
| 105 |
+
# missing keys. Raise a TypeError if the key is unhashable. Unlike
|
| 106 |
+
# the Python discard() method, this function does not
|
| 107 |
+
# automatically convert unhashable sets into temporary
|
| 108 |
+
# frozensets. Raise PyExc_SystemError if set is an not an instance
|
| 109 |
+
# of set or its subtype.
|
| 110 |
+
|
| 111 |
+
object PySet_Pop(object set)
|
| 112 |
+
# Return value: New reference.
|
| 113 |
+
# Return a new reference to an arbitrary object in the set, and
|
| 114 |
+
# removes the object from the set. Return NULL on failure. Raise
|
| 115 |
+
# KeyError if the set is empty. Raise a SystemError if set is an
|
| 116 |
+
# not an instance of set or its subtype.
|
| 117 |
+
|
| 118 |
+
int PySet_Clear(object set)
|
| 119 |
+
# Empty an existing set of all elements.
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/slice.pxd
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cdef extern from "Python.h":
|
| 2 |
+
|
| 3 |
+
# PyTypeObject PySlice_Type
|
| 4 |
+
#
|
| 5 |
+
# The type object for slice objects. This is the same as slice and types.SliceType
|
| 6 |
+
|
| 7 |
+
bint PySlice_Check(object ob)
|
| 8 |
+
#
|
| 9 |
+
# Return true if ob is a slice object; ob must not be NULL.
|
| 10 |
+
|
| 11 |
+
slice PySlice_New(object start, object stop, object step)
|
| 12 |
+
#
|
| 13 |
+
# Return a new slice object with the given values. The start, stop, and step
|
| 14 |
+
# parameters are used as the values of the slice object attributes of the same
|
| 15 |
+
# names. Any of the values may be NULL, in which case the None will be used
|
| 16 |
+
# for the corresponding attribute. Return NULL if the new object could not be
|
| 17 |
+
# allocated.
|
| 18 |
+
|
| 19 |
+
int PySlice_GetIndices(object slice, Py_ssize_t length,
|
| 20 |
+
Py_ssize_t *start, Py_ssize_t *stop, Py_ssize_t *step) except? -1
|
| 21 |
+
#
|
| 22 |
+
# Retrieve the start, stop and step indices from the slice object slice,
|
| 23 |
+
# assuming a sequence of length length. Treats indices greater than length
|
| 24 |
+
# as errors.
|
| 25 |
+
#
|
| 26 |
+
# Returns 0 on success and -1 on error with no exception set (unless one
|
| 27 |
+
# of the indices was not None and failed to be converted to an integer,
|
| 28 |
+
# in which case -1 is returned with an exception set).
|
| 29 |
+
#
|
| 30 |
+
# You probably do not want to use this function.
|
| 31 |
+
#
|
| 32 |
+
# Changed in version 3.2: The parameter type for the slice parameter was
|
| 33 |
+
# PySliceObject* before.
|
| 34 |
+
|
| 35 |
+
int PySlice_GetIndicesEx(object slice, Py_ssize_t length,
|
| 36 |
+
Py_ssize_t *start, Py_ssize_t *stop, Py_ssize_t *step,
|
| 37 |
+
Py_ssize_t *slicelength) except -1
|
| 38 |
+
#
|
| 39 |
+
# Usable replacement for PySlice_GetIndices(). Retrieve the start, stop, and step
|
| 40 |
+
# indices from the slice object slice assuming a sequence of length length, and
|
| 41 |
+
# store the length of the slice in slicelength. Out of bounds indices are clipped
|
| 42 |
+
# in a manner consistent with the handling of normal slices.
|
| 43 |
+
#
|
| 44 |
+
# Returns 0 on success and -1 on error with exception set.
|
| 45 |
+
#
|
| 46 |
+
# Changed in version 3.2: The parameter type for the slice parameter was
|
| 47 |
+
# PySliceObject* before.
|
| 48 |
+
|
| 49 |
+
int PySlice_Unpack(object slice, Py_ssize_t *start, Py_ssize_t *stop,
|
| 50 |
+
Py_ssize_t *step) except -1
|
| 51 |
+
# Extract the start, stop and step data members from a slice object as C
|
| 52 |
+
# integers. Silently reduce values larger than PY_SSIZE_T_MAX to
|
| 53 |
+
# PY_SSIZE_T_MAX, silently boost the start and stop values less than
|
| 54 |
+
# PY_SSIZE_T_MIN to PY_SSIZE_T_MIN, and silently boost the step values
|
| 55 |
+
# less than -PY_SSIZE_T_MAX to -PY_SSIZE_T_MAX.
|
| 56 |
+
|
| 57 |
+
# Return -1 on error, 0 on success.
|
| 58 |
+
|
| 59 |
+
# New in version 3.6.1.
|
| 60 |
+
|
| 61 |
+
Py_ssize_t PySlice_AdjustIndices(Py_ssize_t length, Py_ssize_t *start,
|
| 62 |
+
Py_ssize_t *stop, Py_ssize_t step)
|
| 63 |
+
# Adjust start/end slice indices assuming a sequence of the specified
|
| 64 |
+
# length. Out of bounds indices are clipped in a manner consistent with
|
| 65 |
+
# the handling of normal slices.
|
| 66 |
+
|
| 67 |
+
# Return the length of the slice. Always successful. Doesn’t call Python
|
| 68 |
+
# code.
|
| 69 |
+
|
| 70 |
+
# New in version 3.6.1.
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/Cython/Includes/cpython/weakref.pxd
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .object cimport PyObject
|
| 2 |
+
|
| 3 |
+
cdef extern from "Python.h":
|
| 4 |
+
|
| 5 |
+
bint PyWeakref_Check(object ob)
|
| 6 |
+
# Return true if ob is either a reference or proxy object.
|
| 7 |
+
|
| 8 |
+
bint PyWeakref_CheckRef(object ob)
|
| 9 |
+
# Return true if ob is a reference object.
|
| 10 |
+
|
| 11 |
+
bint PyWeakref_CheckProxy(ob)
|
| 12 |
+
# Return true if *ob* is a proxy object.
|
| 13 |
+
|
| 14 |
+
object PyWeakref_NewRef(object ob, object callback)
|
| 15 |
+
# Return a weak reference object for the object ob. This will
|
| 16 |
+
# always return a new reference, but is not guaranteed to create a
|
| 17 |
+
# new object; an existing reference object may be returned. The
|
| 18 |
+
# second parameter, callback, can be a callable object that
|
| 19 |
+
# receives notification when ob is garbage collected; it should
|
| 20 |
+
# accept a single parameter, which will be the weak reference
|
| 21 |
+
# object itself. callback may also be None or NULL. If ob is not
|
| 22 |
+
# a weakly-referencable object, or if callback is not callable,
|
| 23 |
+
# None, or NULL, this will return NULL and raise TypeError.
|
| 24 |
+
|
| 25 |
+
object PyWeakref_NewProxy(object ob, object callback)
|
| 26 |
+
# Return a weak reference proxy object for the object ob. This
|
| 27 |
+
# will always return a new reference, but is not guaranteed to
|
| 28 |
+
# create a new object; an existing proxy object may be returned.
|
| 29 |
+
# The second parameter, callback, can be a callable object that
|
| 30 |
+
# receives notification when ob is garbage collected; it should
|
| 31 |
+
# accept a single parameter, which will be the weak reference
|
| 32 |
+
# object itself. callback may also be None or NULL. If ob is not
|
| 33 |
+
# a weakly-referencable object, or if callback is not callable,
|
| 34 |
+
# None, or NULL, this will return NULL and raise TypeError.
|
| 35 |
+
|
| 36 |
+
PyObject* PyWeakref_GetObject(object ref) except NULL
|
| 37 |
+
# Return the referenced object from a weak reference, ref. If the
|
| 38 |
+
# referent is no longer live, returns None.
|
| 39 |
+
|
| 40 |
+
PyObject* PyWeakref_GET_OBJECT(object ref)
|
| 41 |
+
# Similar to PyWeakref_GetObject, but implemented as a macro that
|
| 42 |
+
# does no error checking.
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/__pycache__/nx_pylab.cpython-311.pyc
ADDED
|
Binary file (57.1 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/nx_pydot.py
ADDED
|
@@ -0,0 +1,454 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
*****
|
| 3 |
+
Pydot
|
| 4 |
+
*****
|
| 5 |
+
|
| 6 |
+
Import and export NetworkX graphs in Graphviz dot format using pydot.
|
| 7 |
+
|
| 8 |
+
Either this module or nx_agraph can be used to interface with graphviz.
|
| 9 |
+
|
| 10 |
+
Examples
|
| 11 |
+
--------
|
| 12 |
+
>>> G = nx.complete_graph(5)
|
| 13 |
+
>>> PG = nx.nx_pydot.to_pydot(G)
|
| 14 |
+
>>> H = nx.nx_pydot.from_pydot(PG)
|
| 15 |
+
|
| 16 |
+
See Also
|
| 17 |
+
--------
|
| 18 |
+
- pydot: https://github.com/erocarrera/pydot
|
| 19 |
+
- Graphviz: https://www.graphviz.org
|
| 20 |
+
- DOT Language: http://www.graphviz.org/doc/info/lang.html
|
| 21 |
+
"""
|
| 22 |
+
import warnings
|
| 23 |
+
from locale import getpreferredencoding
|
| 24 |
+
|
| 25 |
+
import networkx as nx
|
| 26 |
+
from networkx.utils import open_file
|
| 27 |
+
|
| 28 |
+
__all__ = [
|
| 29 |
+
"write_dot",
|
| 30 |
+
"read_dot",
|
| 31 |
+
"graphviz_layout",
|
| 32 |
+
"pydot_layout",
|
| 33 |
+
"to_pydot",
|
| 34 |
+
"from_pydot",
|
| 35 |
+
]
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@open_file(1, mode="w")
|
| 39 |
+
def write_dot(G, path):
|
| 40 |
+
"""Write NetworkX graph G to Graphviz dot format on path.
|
| 41 |
+
|
| 42 |
+
Path can be a string or a file handle.
|
| 43 |
+
"""
|
| 44 |
+
msg = (
|
| 45 |
+
"nx.nx_pydot.write_dot depends on the pydot package, which has "
|
| 46 |
+
"known issues and is not actively maintained. Consider using "
|
| 47 |
+
"nx.nx_agraph.write_dot instead.\n\n"
|
| 48 |
+
"See https://github.com/networkx/networkx/issues/5723"
|
| 49 |
+
)
|
| 50 |
+
warnings.warn(msg, DeprecationWarning, stacklevel=2)
|
| 51 |
+
P = to_pydot(G)
|
| 52 |
+
path.write(P.to_string())
|
| 53 |
+
return
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
@open_file(0, mode="r")
|
| 57 |
+
@nx._dispatch(name="pydot_read_dot", graphs=None)
|
| 58 |
+
def read_dot(path):
|
| 59 |
+
"""Returns a NetworkX :class:`MultiGraph` or :class:`MultiDiGraph` from the
|
| 60 |
+
dot file with the passed path.
|
| 61 |
+
|
| 62 |
+
If this file contains multiple graphs, only the first such graph is
|
| 63 |
+
returned. All graphs _except_ the first are silently ignored.
|
| 64 |
+
|
| 65 |
+
Parameters
|
| 66 |
+
----------
|
| 67 |
+
path : str or file
|
| 68 |
+
Filename or file handle.
|
| 69 |
+
|
| 70 |
+
Returns
|
| 71 |
+
-------
|
| 72 |
+
G : MultiGraph or MultiDiGraph
|
| 73 |
+
A :class:`MultiGraph` or :class:`MultiDiGraph`.
|
| 74 |
+
|
| 75 |
+
Notes
|
| 76 |
+
-----
|
| 77 |
+
Use `G = nx.Graph(nx.nx_pydot.read_dot(path))` to return a :class:`Graph` instead of a
|
| 78 |
+
:class:`MultiGraph`.
|
| 79 |
+
"""
|
| 80 |
+
import pydot
|
| 81 |
+
|
| 82 |
+
msg = (
|
| 83 |
+
"nx.nx_pydot.read_dot depends on the pydot package, which has "
|
| 84 |
+
"known issues and is not actively maintained. Consider using "
|
| 85 |
+
"nx.nx_agraph.read_dot instead.\n\n"
|
| 86 |
+
"See https://github.com/networkx/networkx/issues/5723"
|
| 87 |
+
)
|
| 88 |
+
warnings.warn(msg, DeprecationWarning, stacklevel=2)
|
| 89 |
+
|
| 90 |
+
data = path.read()
|
| 91 |
+
|
| 92 |
+
# List of one or more "pydot.Dot" instances deserialized from this file.
|
| 93 |
+
P_list = pydot.graph_from_dot_data(data)
|
| 94 |
+
|
| 95 |
+
# Convert only the first such instance into a NetworkX graph.
|
| 96 |
+
return from_pydot(P_list[0])
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
@nx._dispatch(graphs=None)
|
| 100 |
+
def from_pydot(P):
|
| 101 |
+
"""Returns a NetworkX graph from a Pydot graph.
|
| 102 |
+
|
| 103 |
+
Parameters
|
| 104 |
+
----------
|
| 105 |
+
P : Pydot graph
|
| 106 |
+
A graph created with Pydot
|
| 107 |
+
|
| 108 |
+
Returns
|
| 109 |
+
-------
|
| 110 |
+
G : NetworkX multigraph
|
| 111 |
+
A MultiGraph or MultiDiGraph.
|
| 112 |
+
|
| 113 |
+
Examples
|
| 114 |
+
--------
|
| 115 |
+
>>> K5 = nx.complete_graph(5)
|
| 116 |
+
>>> A = nx.nx_pydot.to_pydot(K5)
|
| 117 |
+
>>> G = nx.nx_pydot.from_pydot(A) # return MultiGraph
|
| 118 |
+
|
| 119 |
+
# make a Graph instead of MultiGraph
|
| 120 |
+
>>> G = nx.Graph(nx.nx_pydot.from_pydot(A))
|
| 121 |
+
|
| 122 |
+
"""
|
| 123 |
+
msg = (
|
| 124 |
+
"nx.nx_pydot.from_pydot depends on the pydot package, which has "
|
| 125 |
+
"known issues and is not actively maintained.\n\n"
|
| 126 |
+
"See https://github.com/networkx/networkx/issues/5723"
|
| 127 |
+
)
|
| 128 |
+
warnings.warn(msg, DeprecationWarning, stacklevel=2)
|
| 129 |
+
|
| 130 |
+
if P.get_strict(None): # pydot bug: get_strict() shouldn't take argument
|
| 131 |
+
multiedges = False
|
| 132 |
+
else:
|
| 133 |
+
multiedges = True
|
| 134 |
+
|
| 135 |
+
if P.get_type() == "graph": # undirected
|
| 136 |
+
if multiedges:
|
| 137 |
+
N = nx.MultiGraph()
|
| 138 |
+
else:
|
| 139 |
+
N = nx.Graph()
|
| 140 |
+
else:
|
| 141 |
+
if multiedges:
|
| 142 |
+
N = nx.MultiDiGraph()
|
| 143 |
+
else:
|
| 144 |
+
N = nx.DiGraph()
|
| 145 |
+
|
| 146 |
+
# assign defaults
|
| 147 |
+
name = P.get_name().strip('"')
|
| 148 |
+
if name != "":
|
| 149 |
+
N.name = name
|
| 150 |
+
|
| 151 |
+
# add nodes, attributes to N.node_attr
|
| 152 |
+
for p in P.get_node_list():
|
| 153 |
+
n = p.get_name().strip('"')
|
| 154 |
+
if n in ("node", "graph", "edge"):
|
| 155 |
+
continue
|
| 156 |
+
N.add_node(n, **p.get_attributes())
|
| 157 |
+
|
| 158 |
+
# add edges
|
| 159 |
+
for e in P.get_edge_list():
|
| 160 |
+
u = e.get_source()
|
| 161 |
+
v = e.get_destination()
|
| 162 |
+
attr = e.get_attributes()
|
| 163 |
+
s = []
|
| 164 |
+
d = []
|
| 165 |
+
|
| 166 |
+
if isinstance(u, str):
|
| 167 |
+
s.append(u.strip('"'))
|
| 168 |
+
else:
|
| 169 |
+
for unodes in u["nodes"]:
|
| 170 |
+
s.append(unodes.strip('"'))
|
| 171 |
+
|
| 172 |
+
if isinstance(v, str):
|
| 173 |
+
d.append(v.strip('"'))
|
| 174 |
+
else:
|
| 175 |
+
for vnodes in v["nodes"]:
|
| 176 |
+
d.append(vnodes.strip('"'))
|
| 177 |
+
|
| 178 |
+
for source_node in s:
|
| 179 |
+
for destination_node in d:
|
| 180 |
+
N.add_edge(source_node, destination_node, **attr)
|
| 181 |
+
|
| 182 |
+
# add default attributes for graph, nodes, edges
|
| 183 |
+
pattr = P.get_attributes()
|
| 184 |
+
if pattr:
|
| 185 |
+
N.graph["graph"] = pattr
|
| 186 |
+
try:
|
| 187 |
+
N.graph["node"] = P.get_node_defaults()[0]
|
| 188 |
+
except (IndexError, TypeError):
|
| 189 |
+
pass # N.graph['node']={}
|
| 190 |
+
try:
|
| 191 |
+
N.graph["edge"] = P.get_edge_defaults()[0]
|
| 192 |
+
except (IndexError, TypeError):
|
| 193 |
+
pass # N.graph['edge']={}
|
| 194 |
+
return N
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def _check_colon_quotes(s):
|
| 198 |
+
# A quick helper function to check if a string has a colon in it
|
| 199 |
+
# and if it is quoted properly with double quotes.
|
| 200 |
+
# refer https://github.com/pydot/pydot/issues/258
|
| 201 |
+
return ":" in s and (s[0] != '"' or s[-1] != '"')
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def to_pydot(N):
|
| 205 |
+
"""Returns a pydot graph from a NetworkX graph N.
|
| 206 |
+
|
| 207 |
+
Parameters
|
| 208 |
+
----------
|
| 209 |
+
N : NetworkX graph
|
| 210 |
+
A graph created with NetworkX
|
| 211 |
+
|
| 212 |
+
Examples
|
| 213 |
+
--------
|
| 214 |
+
>>> K5 = nx.complete_graph(5)
|
| 215 |
+
>>> P = nx.nx_pydot.to_pydot(K5)
|
| 216 |
+
|
| 217 |
+
Notes
|
| 218 |
+
-----
|
| 219 |
+
|
| 220 |
+
"""
|
| 221 |
+
import pydot
|
| 222 |
+
|
| 223 |
+
msg = (
|
| 224 |
+
"nx.nx_pydot.to_pydot depends on the pydot package, which has "
|
| 225 |
+
"known issues and is not actively maintained.\n\n"
|
| 226 |
+
"See https://github.com/networkx/networkx/issues/5723"
|
| 227 |
+
)
|
| 228 |
+
warnings.warn(msg, DeprecationWarning, stacklevel=2)
|
| 229 |
+
|
| 230 |
+
# set Graphviz graph type
|
| 231 |
+
if N.is_directed():
|
| 232 |
+
graph_type = "digraph"
|
| 233 |
+
else:
|
| 234 |
+
graph_type = "graph"
|
| 235 |
+
strict = nx.number_of_selfloops(N) == 0 and not N.is_multigraph()
|
| 236 |
+
|
| 237 |
+
name = N.name
|
| 238 |
+
graph_defaults = N.graph.get("graph", {})
|
| 239 |
+
if name == "":
|
| 240 |
+
P = pydot.Dot("", graph_type=graph_type, strict=strict, **graph_defaults)
|
| 241 |
+
else:
|
| 242 |
+
P = pydot.Dot(
|
| 243 |
+
f'"{name}"', graph_type=graph_type, strict=strict, **graph_defaults
|
| 244 |
+
)
|
| 245 |
+
try:
|
| 246 |
+
P.set_node_defaults(**N.graph["node"])
|
| 247 |
+
except KeyError:
|
| 248 |
+
pass
|
| 249 |
+
try:
|
| 250 |
+
P.set_edge_defaults(**N.graph["edge"])
|
| 251 |
+
except KeyError:
|
| 252 |
+
pass
|
| 253 |
+
|
| 254 |
+
for n, nodedata in N.nodes(data=True):
|
| 255 |
+
str_nodedata = {str(k): str(v) for k, v in nodedata.items()}
|
| 256 |
+
# Explicitly catch nodes with ":" in node names or nodedata.
|
| 257 |
+
n = str(n)
|
| 258 |
+
raise_error = _check_colon_quotes(n) or (
|
| 259 |
+
any(
|
| 260 |
+
(_check_colon_quotes(k) or _check_colon_quotes(v))
|
| 261 |
+
for k, v in str_nodedata.items()
|
| 262 |
+
)
|
| 263 |
+
)
|
| 264 |
+
if raise_error:
|
| 265 |
+
raise ValueError(
|
| 266 |
+
f'Node names and attributes should not contain ":" unless they are quoted with "".\
|
| 267 |
+
For example the string \'attribute:data1\' should be written as \'"attribute:data1"\'.\
|
| 268 |
+
Please refer https://github.com/pydot/pydot/issues/258'
|
| 269 |
+
)
|
| 270 |
+
p = pydot.Node(n, **str_nodedata)
|
| 271 |
+
P.add_node(p)
|
| 272 |
+
|
| 273 |
+
if N.is_multigraph():
|
| 274 |
+
for u, v, key, edgedata in N.edges(data=True, keys=True):
|
| 275 |
+
str_edgedata = {str(k): str(v) for k, v in edgedata.items() if k != "key"}
|
| 276 |
+
u, v = str(u), str(v)
|
| 277 |
+
raise_error = (
|
| 278 |
+
_check_colon_quotes(u)
|
| 279 |
+
or _check_colon_quotes(v)
|
| 280 |
+
or (
|
| 281 |
+
any(
|
| 282 |
+
(_check_colon_quotes(k) or _check_colon_quotes(val))
|
| 283 |
+
for k, val in str_edgedata.items()
|
| 284 |
+
)
|
| 285 |
+
)
|
| 286 |
+
)
|
| 287 |
+
if raise_error:
|
| 288 |
+
raise ValueError(
|
| 289 |
+
f'Node names and attributes should not contain ":" unless they are quoted with "".\
|
| 290 |
+
For example the string \'attribute:data1\' should be written as \'"attribute:data1"\'.\
|
| 291 |
+
Please refer https://github.com/pydot/pydot/issues/258'
|
| 292 |
+
)
|
| 293 |
+
edge = pydot.Edge(u, v, key=str(key), **str_edgedata)
|
| 294 |
+
P.add_edge(edge)
|
| 295 |
+
|
| 296 |
+
else:
|
| 297 |
+
for u, v, edgedata in N.edges(data=True):
|
| 298 |
+
str_edgedata = {str(k): str(v) for k, v in edgedata.items()}
|
| 299 |
+
u, v = str(u), str(v)
|
| 300 |
+
raise_error = (
|
| 301 |
+
_check_colon_quotes(u)
|
| 302 |
+
or _check_colon_quotes(v)
|
| 303 |
+
or (
|
| 304 |
+
any(
|
| 305 |
+
(_check_colon_quotes(k) or _check_colon_quotes(val))
|
| 306 |
+
for k, val in str_edgedata.items()
|
| 307 |
+
)
|
| 308 |
+
)
|
| 309 |
+
)
|
| 310 |
+
if raise_error:
|
| 311 |
+
raise ValueError(
|
| 312 |
+
f'Node names and attributes should not contain ":" unless they are quoted with "".\
|
| 313 |
+
For example the string \'attribute:data1\' should be written as \'"attribute:data1"\'.\
|
| 314 |
+
Please refer https://github.com/pydot/pydot/issues/258'
|
| 315 |
+
)
|
| 316 |
+
edge = pydot.Edge(u, v, **str_edgedata)
|
| 317 |
+
P.add_edge(edge)
|
| 318 |
+
return P
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
def graphviz_layout(G, prog="neato", root=None):
|
| 322 |
+
"""Create node positions using Pydot and Graphviz.
|
| 323 |
+
|
| 324 |
+
Returns a dictionary of positions keyed by node.
|
| 325 |
+
|
| 326 |
+
Parameters
|
| 327 |
+
----------
|
| 328 |
+
G : NetworkX Graph
|
| 329 |
+
The graph for which the layout is computed.
|
| 330 |
+
prog : string (default: 'neato')
|
| 331 |
+
The name of the GraphViz program to use for layout.
|
| 332 |
+
Options depend on GraphViz version but may include:
|
| 333 |
+
'dot', 'twopi', 'fdp', 'sfdp', 'circo'
|
| 334 |
+
root : Node from G or None (default: None)
|
| 335 |
+
The node of G from which to start some layout algorithms.
|
| 336 |
+
|
| 337 |
+
Returns
|
| 338 |
+
-------
|
| 339 |
+
Dictionary of (x, y) positions keyed by node.
|
| 340 |
+
|
| 341 |
+
Examples
|
| 342 |
+
--------
|
| 343 |
+
>>> G = nx.complete_graph(4)
|
| 344 |
+
>>> pos = nx.nx_pydot.graphviz_layout(G)
|
| 345 |
+
>>> pos = nx.nx_pydot.graphviz_layout(G, prog="dot")
|
| 346 |
+
|
| 347 |
+
Notes
|
| 348 |
+
-----
|
| 349 |
+
This is a wrapper for pydot_layout.
|
| 350 |
+
"""
|
| 351 |
+
msg = (
|
| 352 |
+
"nx.nx_pydot.graphviz_layout depends on the pydot package, which has "
|
| 353 |
+
"known issues and is not actively maintained. Consider using "
|
| 354 |
+
"nx.nx_agraph.graphviz_layout instead.\n\n"
|
| 355 |
+
"See https://github.com/networkx/networkx/issues/5723"
|
| 356 |
+
)
|
| 357 |
+
warnings.warn(msg, DeprecationWarning, stacklevel=2)
|
| 358 |
+
|
| 359 |
+
return pydot_layout(G=G, prog=prog, root=root)
|
| 360 |
+
|
| 361 |
+
|
| 362 |
+
def pydot_layout(G, prog="neato", root=None):
|
| 363 |
+
"""Create node positions using :mod:`pydot` and Graphviz.
|
| 364 |
+
|
| 365 |
+
Parameters
|
| 366 |
+
----------
|
| 367 |
+
G : Graph
|
| 368 |
+
NetworkX graph to be laid out.
|
| 369 |
+
prog : string (default: 'neato')
|
| 370 |
+
Name of the GraphViz command to use for layout.
|
| 371 |
+
Options depend on GraphViz version but may include:
|
| 372 |
+
'dot', 'twopi', 'fdp', 'sfdp', 'circo'
|
| 373 |
+
root : Node from G or None (default: None)
|
| 374 |
+
The node of G from which to start some layout algorithms.
|
| 375 |
+
|
| 376 |
+
Returns
|
| 377 |
+
-------
|
| 378 |
+
dict
|
| 379 |
+
Dictionary of positions keyed by node.
|
| 380 |
+
|
| 381 |
+
Examples
|
| 382 |
+
--------
|
| 383 |
+
>>> G = nx.complete_graph(4)
|
| 384 |
+
>>> pos = nx.nx_pydot.pydot_layout(G)
|
| 385 |
+
>>> pos = nx.nx_pydot.pydot_layout(G, prog="dot")
|
| 386 |
+
|
| 387 |
+
Notes
|
| 388 |
+
-----
|
| 389 |
+
If you use complex node objects, they may have the same string
|
| 390 |
+
representation and GraphViz could treat them as the same node.
|
| 391 |
+
The layout may assign both nodes a single location. See Issue #1568
|
| 392 |
+
If this occurs in your case, consider relabeling the nodes just
|
| 393 |
+
for the layout computation using something similar to::
|
| 394 |
+
|
| 395 |
+
H = nx.convert_node_labels_to_integers(G, label_attribute='node_label')
|
| 396 |
+
H_layout = nx.nx_pydot.pydot_layout(G, prog='dot')
|
| 397 |
+
G_layout = {H.nodes[n]['node_label']: p for n, p in H_layout.items()}
|
| 398 |
+
|
| 399 |
+
"""
|
| 400 |
+
import pydot
|
| 401 |
+
|
| 402 |
+
msg = (
|
| 403 |
+
"nx.nx_pydot.pydot_layout depends on the pydot package, which has "
|
| 404 |
+
"known issues and is not actively maintained.\n\n"
|
| 405 |
+
"See https://github.com/networkx/networkx/issues/5723"
|
| 406 |
+
)
|
| 407 |
+
warnings.warn(msg, DeprecationWarning, stacklevel=2)
|
| 408 |
+
P = to_pydot(G)
|
| 409 |
+
if root is not None:
|
| 410 |
+
P.set("root", str(root))
|
| 411 |
+
|
| 412 |
+
# List of low-level bytes comprising a string in the dot language converted
|
| 413 |
+
# from the passed graph with the passed external GraphViz command.
|
| 414 |
+
D_bytes = P.create_dot(prog=prog)
|
| 415 |
+
|
| 416 |
+
# Unique string decoded from these bytes with the preferred locale encoding
|
| 417 |
+
D = str(D_bytes, encoding=getpreferredencoding())
|
| 418 |
+
|
| 419 |
+
if D == "": # no data returned
|
| 420 |
+
print(f"Graphviz layout with {prog} failed")
|
| 421 |
+
print()
|
| 422 |
+
print("To debug what happened try:")
|
| 423 |
+
print("P = nx.nx_pydot.to_pydot(G)")
|
| 424 |
+
print('P.write_dot("file.dot")')
|
| 425 |
+
print(f"And then run {prog} on file.dot")
|
| 426 |
+
return
|
| 427 |
+
|
| 428 |
+
# List of one or more "pydot.Dot" instances deserialized from this string.
|
| 429 |
+
Q_list = pydot.graph_from_dot_data(D)
|
| 430 |
+
assert len(Q_list) == 1
|
| 431 |
+
|
| 432 |
+
# The first and only such instance, as guaranteed by the above assertion.
|
| 433 |
+
Q = Q_list[0]
|
| 434 |
+
|
| 435 |
+
node_pos = {}
|
| 436 |
+
for n in G.nodes():
|
| 437 |
+
str_n = str(n)
|
| 438 |
+
# Explicitly catch nodes with ":" in node names or nodedata.
|
| 439 |
+
if _check_colon_quotes(str_n):
|
| 440 |
+
raise ValueError(
|
| 441 |
+
f'Node names and node attributes should not contain ":" unless they are quoted with "".\
|
| 442 |
+
For example the string \'attribute:data1\' should be written as \'"attribute:data1"\'.\
|
| 443 |
+
Please refer https://github.com/pydot/pydot/issues/258'
|
| 444 |
+
)
|
| 445 |
+
pydot_node = pydot.Node(str_n).get_name()
|
| 446 |
+
node = Q.get_node(pydot_node)
|
| 447 |
+
|
| 448 |
+
if isinstance(node, list):
|
| 449 |
+
node = node[0]
|
| 450 |
+
pos = node.get_pos()[1:-1] # strip leading and trailing double quotes
|
| 451 |
+
if pos is not None:
|
| 452 |
+
xx, yy = pos.split(",")
|
| 453 |
+
node_pos[n] = (float(xx), float(yy))
|
| 454 |
+
return node_pos
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__init__.py
ADDED
|
File without changes
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__pycache__/test_latex.cpython-311.pyc
ADDED
|
Binary file (11.7 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__pycache__/test_layout.cpython-311.pyc
ADDED
|
Binary file (35.7 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/drawing/tests/__pycache__/test_pydot.cpython-311.pyc
ADDED
|
Binary file (13.7 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/__pycache__/test_convert_numpy.cpython-311.pyc
ADDED
|
Binary file (30.9 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/tests/__pycache__/test_convert_pandas.cpython-311.pyc
ADDED
|
Binary file (21.5 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/__pycache__/misc.cpython-311.pyc
ADDED
|
Binary file (19.8 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/__pycache__/random_sequence.cpython-311.pyc
ADDED
|
Binary file (6.53 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/backends.py
ADDED
|
@@ -0,0 +1,975 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Code to support various backends in a plugin dispatch architecture.
|
| 3 |
+
|
| 4 |
+
Create a Dispatcher
|
| 5 |
+
-------------------
|
| 6 |
+
|
| 7 |
+
To be a valid backend, a package must register an entry_point
|
| 8 |
+
of `networkx.backends` with a key pointing to the handler.
|
| 9 |
+
|
| 10 |
+
For example::
|
| 11 |
+
|
| 12 |
+
entry_points={'networkx.backends': 'sparse = networkx_backend_sparse'}
|
| 13 |
+
|
| 14 |
+
The backend must create a Graph-like object which contains an attribute
|
| 15 |
+
``__networkx_backend__`` with a value of the entry point name.
|
| 16 |
+
|
| 17 |
+
Continuing the example above::
|
| 18 |
+
|
| 19 |
+
class WrappedSparse:
|
| 20 |
+
__networkx_backend__ = "sparse"
|
| 21 |
+
...
|
| 22 |
+
|
| 23 |
+
When a dispatchable NetworkX algorithm encounters a Graph-like object
|
| 24 |
+
with a ``__networkx_backend__`` attribute, it will look for the associated
|
| 25 |
+
dispatch object in the entry_points, load it, and dispatch the work to it.
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
Testing
|
| 29 |
+
-------
|
| 30 |
+
To assist in validating the backend algorithm implementations, if an
|
| 31 |
+
environment variable ``NETWORKX_TEST_BACKEND`` is set to a registered
|
| 32 |
+
backend key, the dispatch machinery will automatically convert regular
|
| 33 |
+
networkx Graphs and DiGraphs to the backend equivalent by calling
|
| 34 |
+
``<backend dispatcher>.convert_from_nx(G, edge_attrs=edge_attrs, name=name)``.
|
| 35 |
+
Set ``NETWORKX_FALLBACK_TO_NX`` environment variable to have tests
|
| 36 |
+
use networkx graphs for algorithms not implemented by the backend.
|
| 37 |
+
|
| 38 |
+
The arguments to ``convert_from_nx`` are:
|
| 39 |
+
|
| 40 |
+
- ``G`` : networkx Graph
|
| 41 |
+
- ``edge_attrs`` : dict, optional
|
| 42 |
+
Dict that maps edge attributes to default values if missing in ``G``.
|
| 43 |
+
If None, then no edge attributes will be converted and default may be 1.
|
| 44 |
+
- ``node_attrs``: dict, optional
|
| 45 |
+
Dict that maps node attribute to default values if missing in ``G``.
|
| 46 |
+
If None, then no node attributes will be converted.
|
| 47 |
+
- ``preserve_edge_attrs`` : bool
|
| 48 |
+
Whether to preserve all edge attributes.
|
| 49 |
+
- ``preserve_node_attrs`` : bool
|
| 50 |
+
Whether to preserve all node attributes.
|
| 51 |
+
- ``preserve_graph_attrs`` : bool
|
| 52 |
+
Whether to preserve all graph attributes.
|
| 53 |
+
- ``preserve_all_attrs`` : bool
|
| 54 |
+
Whether to preserve all graph, node, and edge attributes.
|
| 55 |
+
- ``name`` : str
|
| 56 |
+
The name of the algorithm.
|
| 57 |
+
- ``graph_name`` : str
|
| 58 |
+
The name of the graph argument being converted.
|
| 59 |
+
|
| 60 |
+
The converted object is then passed to the backend implementation of
|
| 61 |
+
the algorithm. The result is then passed to
|
| 62 |
+
``<backend dispatcher>.convert_to_nx(result, name=name)`` to convert back
|
| 63 |
+
to a form expected by the NetworkX tests.
|
| 64 |
+
|
| 65 |
+
By defining ``convert_from_nx`` and ``convert_to_nx`` methods and setting
|
| 66 |
+
the environment variable, NetworkX will automatically route tests on
|
| 67 |
+
dispatchable algorithms to the backend, allowing the full networkx test
|
| 68 |
+
suite to be run against the backend implementation.
|
| 69 |
+
|
| 70 |
+
Example pytest invocation::
|
| 71 |
+
|
| 72 |
+
NETWORKX_TEST_BACKEND=sparse pytest --pyargs networkx
|
| 73 |
+
|
| 74 |
+
Dispatchable algorithms which are not implemented by the backend
|
| 75 |
+
will cause a ``pytest.xfail()``, giving some indication that not all
|
| 76 |
+
tests are working, while avoiding causing an explicit failure.
|
| 77 |
+
|
| 78 |
+
If a backend only partially implements some algorithms, it can define
|
| 79 |
+
a ``can_run(name, args, kwargs)`` function that returns True or False
|
| 80 |
+
indicating whether it can run the algorithm with the given arguments.
|
| 81 |
+
|
| 82 |
+
A special ``on_start_tests(items)`` function may be defined by the backend.
|
| 83 |
+
It will be called with the list of NetworkX tests discovered. Each item
|
| 84 |
+
is a test object that can be marked as xfail if the backend does not support
|
| 85 |
+
the test using `item.add_marker(pytest.mark.xfail(reason=...))`.
|
| 86 |
+
"""
|
| 87 |
+
import inspect
|
| 88 |
+
import os
|
| 89 |
+
import sys
|
| 90 |
+
import warnings
|
| 91 |
+
from functools import partial
|
| 92 |
+
from importlib.metadata import entry_points
|
| 93 |
+
|
| 94 |
+
from ..exception import NetworkXNotImplemented
|
| 95 |
+
|
| 96 |
+
__all__ = ["_dispatch"]
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def _get_backends(group, *, load_and_call=False):
|
| 100 |
+
if sys.version_info < (3, 10):
|
| 101 |
+
eps = entry_points()
|
| 102 |
+
if group not in eps:
|
| 103 |
+
return {}
|
| 104 |
+
items = eps[group]
|
| 105 |
+
else:
|
| 106 |
+
items = entry_points(group=group)
|
| 107 |
+
rv = {}
|
| 108 |
+
for ep in items:
|
| 109 |
+
if ep.name in rv:
|
| 110 |
+
warnings.warn(
|
| 111 |
+
f"networkx backend defined more than once: {ep.name}",
|
| 112 |
+
RuntimeWarning,
|
| 113 |
+
stacklevel=2,
|
| 114 |
+
)
|
| 115 |
+
elif load_and_call:
|
| 116 |
+
try:
|
| 117 |
+
rv[ep.name] = ep.load()()
|
| 118 |
+
except Exception as exc:
|
| 119 |
+
warnings.warn(
|
| 120 |
+
f"Error encountered when loading info for backend {ep.name}: {exc}",
|
| 121 |
+
RuntimeWarning,
|
| 122 |
+
stacklevel=2,
|
| 123 |
+
)
|
| 124 |
+
else:
|
| 125 |
+
rv[ep.name] = ep
|
| 126 |
+
# nx-loopback backend is only available when testing (added in conftest.py)
|
| 127 |
+
rv.pop("nx-loopback", None)
|
| 128 |
+
return rv
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
# Rename "plugin" to "backend", and give backends a release cycle to update.
|
| 132 |
+
backends = _get_backends("networkx.plugins")
|
| 133 |
+
backend_info = _get_backends("networkx.plugin_info", load_and_call=True)
|
| 134 |
+
|
| 135 |
+
backends.update(_get_backends("networkx.backends"))
|
| 136 |
+
backend_info.update(_get_backends("networkx.backend_info", load_and_call=True))
|
| 137 |
+
|
| 138 |
+
# Load and cache backends on-demand
|
| 139 |
+
_loaded_backends = {} # type: ignore[var-annotated]
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def _load_backend(backend_name):
|
| 143 |
+
if backend_name in _loaded_backends:
|
| 144 |
+
return _loaded_backends[backend_name]
|
| 145 |
+
rv = _loaded_backends[backend_name] = backends[backend_name].load()
|
| 146 |
+
return rv
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
_registered_algorithms = {}
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
class _dispatch:
|
| 153 |
+
"""Dispatches to a backend algorithm based on input graph types.
|
| 154 |
+
|
| 155 |
+
Parameters
|
| 156 |
+
----------
|
| 157 |
+
func : function
|
| 158 |
+
|
| 159 |
+
name : str, optional
|
| 160 |
+
The name of the algorithm to use for dispatching. If not provided,
|
| 161 |
+
the name of ``func`` will be used. ``name`` is useful to avoid name
|
| 162 |
+
conflicts, as all dispatched algorithms live in a single namespace.
|
| 163 |
+
|
| 164 |
+
graphs : str or dict or None, default "G"
|
| 165 |
+
If a string, the parameter name of the graph, which must be the first
|
| 166 |
+
argument of the wrapped function. If more than one graph is required
|
| 167 |
+
for the algorithm (or if the graph is not the first argument), provide
|
| 168 |
+
a dict of parameter name to argument position for each graph argument.
|
| 169 |
+
For example, ``@_dispatch(graphs={"G": 0, "auxiliary?": 4})``
|
| 170 |
+
indicates the 0th parameter ``G`` of the function is a required graph,
|
| 171 |
+
and the 4th parameter ``auxiliary`` is an optional graph.
|
| 172 |
+
To indicate an argument is a list of graphs, do e.g. ``"[graphs]"``.
|
| 173 |
+
Use ``graphs=None`` if *no* arguments are NetworkX graphs such as for
|
| 174 |
+
graph generators, readers, and conversion functions.
|
| 175 |
+
|
| 176 |
+
edge_attrs : str or dict, optional
|
| 177 |
+
``edge_attrs`` holds information about edge attribute arguments
|
| 178 |
+
and default values for those edge attributes.
|
| 179 |
+
If a string, ``edge_attrs`` holds the function argument name that
|
| 180 |
+
indicates a single edge attribute to include in the converted graph.
|
| 181 |
+
The default value for this attribute is 1. To indicate that an argument
|
| 182 |
+
is a list of attributes (all with default value 1), use e.g. ``"[attrs]"``.
|
| 183 |
+
If a dict, ``edge_attrs`` holds a dict keyed by argument names, with
|
| 184 |
+
values that are either the default value or, if a string, the argument
|
| 185 |
+
name that indicates the default value.
|
| 186 |
+
|
| 187 |
+
node_attrs : str or dict, optional
|
| 188 |
+
Like ``edge_attrs``, but for node attributes.
|
| 189 |
+
|
| 190 |
+
preserve_edge_attrs : bool or str or dict, optional
|
| 191 |
+
For bool, whether to preserve all edge attributes.
|
| 192 |
+
For str, the parameter name that may indicate (with ``True`` or a
|
| 193 |
+
callable argument) whether all edge attributes should be preserved
|
| 194 |
+
when converting.
|
| 195 |
+
For dict of ``{graph_name: {attr: default}}``, indicate pre-determined
|
| 196 |
+
edge attributes (and defaults) to preserve for input graphs.
|
| 197 |
+
|
| 198 |
+
preserve_node_attrs : bool or str or dict, optional
|
| 199 |
+
Like ``preserve_edge_attrs``, but for node attributes.
|
| 200 |
+
|
| 201 |
+
preserve_graph_attrs : bool or set
|
| 202 |
+
For bool, whether to preserve all graph attributes.
|
| 203 |
+
For set, which input graph arguments to preserve graph attributes.
|
| 204 |
+
|
| 205 |
+
preserve_all_attrs : bool
|
| 206 |
+
Whether to preserve all edge, node and graph attributes.
|
| 207 |
+
This overrides all the other preserve_*_attrs.
|
| 208 |
+
|
| 209 |
+
"""
|
| 210 |
+
|
| 211 |
+
# Allow any of the following decorator forms:
|
| 212 |
+
# - @_dispatch
|
| 213 |
+
# - @_dispatch()
|
| 214 |
+
# - @_dispatch(name="override_name")
|
| 215 |
+
# - @_dispatch(graphs="graph")
|
| 216 |
+
# - @_dispatch(edge_attrs="weight")
|
| 217 |
+
# - @_dispatch(graphs={"G": 0, "H": 1}, edge_attrs={"weight": "default"})
|
| 218 |
+
|
| 219 |
+
# These class attributes are currently used to allow backends to run networkx tests.
|
| 220 |
+
# For example: `PYTHONPATH=. pytest --backend graphblas --fallback-to-nx`
|
| 221 |
+
# Future work: add configuration to control these
|
| 222 |
+
_is_testing = False
|
| 223 |
+
_fallback_to_nx = (
|
| 224 |
+
os.environ.get("NETWORKX_FALLBACK_TO_NX", "true").strip().lower() == "true"
|
| 225 |
+
)
|
| 226 |
+
_automatic_backends = [
|
| 227 |
+
x.strip()
|
| 228 |
+
for x in os.environ.get("NETWORKX_AUTOMATIC_BACKENDS", "").split(",")
|
| 229 |
+
if x.strip()
|
| 230 |
+
]
|
| 231 |
+
|
| 232 |
+
def __new__(
|
| 233 |
+
cls,
|
| 234 |
+
func=None,
|
| 235 |
+
*,
|
| 236 |
+
name=None,
|
| 237 |
+
graphs="G",
|
| 238 |
+
edge_attrs=None,
|
| 239 |
+
node_attrs=None,
|
| 240 |
+
preserve_edge_attrs=False,
|
| 241 |
+
preserve_node_attrs=False,
|
| 242 |
+
preserve_graph_attrs=False,
|
| 243 |
+
preserve_all_attrs=False,
|
| 244 |
+
):
|
| 245 |
+
if func is None:
|
| 246 |
+
return partial(
|
| 247 |
+
_dispatch,
|
| 248 |
+
name=name,
|
| 249 |
+
graphs=graphs,
|
| 250 |
+
edge_attrs=edge_attrs,
|
| 251 |
+
node_attrs=node_attrs,
|
| 252 |
+
preserve_edge_attrs=preserve_edge_attrs,
|
| 253 |
+
preserve_node_attrs=preserve_node_attrs,
|
| 254 |
+
preserve_graph_attrs=preserve_graph_attrs,
|
| 255 |
+
preserve_all_attrs=preserve_all_attrs,
|
| 256 |
+
)
|
| 257 |
+
if isinstance(func, str):
|
| 258 |
+
raise TypeError("'name' and 'graphs' must be passed by keyword") from None
|
| 259 |
+
# If name not provided, use the name of the function
|
| 260 |
+
if name is None:
|
| 261 |
+
name = func.__name__
|
| 262 |
+
|
| 263 |
+
self = object.__new__(cls)
|
| 264 |
+
|
| 265 |
+
# standard function-wrapping stuff
|
| 266 |
+
# __annotations__ not used
|
| 267 |
+
self.__name__ = func.__name__
|
| 268 |
+
# self.__doc__ = func.__doc__ # __doc__ handled as cached property
|
| 269 |
+
self.__defaults__ = func.__defaults__
|
| 270 |
+
# We "magically" add `backend=` keyword argument to allow backend to be specified
|
| 271 |
+
if func.__kwdefaults__:
|
| 272 |
+
self.__kwdefaults__ = {**func.__kwdefaults__, "backend": None}
|
| 273 |
+
else:
|
| 274 |
+
self.__kwdefaults__ = {"backend": None}
|
| 275 |
+
self.__module__ = func.__module__
|
| 276 |
+
self.__qualname__ = func.__qualname__
|
| 277 |
+
self.__dict__.update(func.__dict__)
|
| 278 |
+
self.__wrapped__ = func
|
| 279 |
+
|
| 280 |
+
# Supplement docstring with backend info; compute and cache when needed
|
| 281 |
+
self._orig_doc = func.__doc__
|
| 282 |
+
self._cached_doc = None
|
| 283 |
+
|
| 284 |
+
self.orig_func = func
|
| 285 |
+
self.name = name
|
| 286 |
+
self.edge_attrs = edge_attrs
|
| 287 |
+
self.node_attrs = node_attrs
|
| 288 |
+
self.preserve_edge_attrs = preserve_edge_attrs or preserve_all_attrs
|
| 289 |
+
self.preserve_node_attrs = preserve_node_attrs or preserve_all_attrs
|
| 290 |
+
self.preserve_graph_attrs = preserve_graph_attrs or preserve_all_attrs
|
| 291 |
+
|
| 292 |
+
if edge_attrs is not None and not isinstance(edge_attrs, (str, dict)):
|
| 293 |
+
raise TypeError(
|
| 294 |
+
f"Bad type for edge_attrs: {type(edge_attrs)}. Expected str or dict."
|
| 295 |
+
) from None
|
| 296 |
+
if node_attrs is not None and not isinstance(node_attrs, (str, dict)):
|
| 297 |
+
raise TypeError(
|
| 298 |
+
f"Bad type for node_attrs: {type(node_attrs)}. Expected str or dict."
|
| 299 |
+
) from None
|
| 300 |
+
if not isinstance(self.preserve_edge_attrs, (bool, str, dict)):
|
| 301 |
+
raise TypeError(
|
| 302 |
+
f"Bad type for preserve_edge_attrs: {type(self.preserve_edge_attrs)}."
|
| 303 |
+
" Expected bool, str, or dict."
|
| 304 |
+
) from None
|
| 305 |
+
if not isinstance(self.preserve_node_attrs, (bool, str, dict)):
|
| 306 |
+
raise TypeError(
|
| 307 |
+
f"Bad type for preserve_node_attrs: {type(self.preserve_node_attrs)}."
|
| 308 |
+
" Expected bool, str, or dict."
|
| 309 |
+
) from None
|
| 310 |
+
if not isinstance(self.preserve_graph_attrs, (bool, set)):
|
| 311 |
+
raise TypeError(
|
| 312 |
+
f"Bad type for preserve_graph_attrs: {type(self.preserve_graph_attrs)}."
|
| 313 |
+
" Expected bool or set."
|
| 314 |
+
) from None
|
| 315 |
+
|
| 316 |
+
if isinstance(graphs, str):
|
| 317 |
+
graphs = {graphs: 0}
|
| 318 |
+
elif graphs is None:
|
| 319 |
+
pass
|
| 320 |
+
elif not isinstance(graphs, dict):
|
| 321 |
+
raise TypeError(
|
| 322 |
+
f"Bad type for graphs: {type(graphs)}. Expected str or dict."
|
| 323 |
+
) from None
|
| 324 |
+
elif len(graphs) == 0:
|
| 325 |
+
raise KeyError("'graphs' must contain at least one variable name") from None
|
| 326 |
+
|
| 327 |
+
# This dict comprehension is complicated for better performance; equivalent shown below.
|
| 328 |
+
self.optional_graphs = set()
|
| 329 |
+
self.list_graphs = set()
|
| 330 |
+
if graphs is None:
|
| 331 |
+
self.graphs = {}
|
| 332 |
+
else:
|
| 333 |
+
self.graphs = {
|
| 334 |
+
self.optional_graphs.add(val := k[:-1]) or val
|
| 335 |
+
if (last := k[-1]) == "?"
|
| 336 |
+
else self.list_graphs.add(val := k[1:-1]) or val
|
| 337 |
+
if last == "]"
|
| 338 |
+
else k: v
|
| 339 |
+
for k, v in graphs.items()
|
| 340 |
+
}
|
| 341 |
+
# The above is equivalent to:
|
| 342 |
+
# self.optional_graphs = {k[:-1] for k in graphs if k[-1] == "?"}
|
| 343 |
+
# self.list_graphs = {k[1:-1] for k in graphs if k[-1] == "]"}
|
| 344 |
+
# self.graphs = {k[:-1] if k[-1] == "?" else k: v for k, v in graphs.items()}
|
| 345 |
+
|
| 346 |
+
# Compute and cache the signature on-demand
|
| 347 |
+
self._sig = None
|
| 348 |
+
|
| 349 |
+
# Which backends implement this function?
|
| 350 |
+
self.backends = {
|
| 351 |
+
backend
|
| 352 |
+
for backend, info in backend_info.items()
|
| 353 |
+
if "functions" in info and name in info["functions"]
|
| 354 |
+
}
|
| 355 |
+
|
| 356 |
+
if name in _registered_algorithms:
|
| 357 |
+
raise KeyError(
|
| 358 |
+
f"Algorithm already exists in dispatch registry: {name}"
|
| 359 |
+
) from None
|
| 360 |
+
_registered_algorithms[name] = self
|
| 361 |
+
return self
|
| 362 |
+
|
| 363 |
+
@property
|
| 364 |
+
def __doc__(self):
|
| 365 |
+
if (rv := self._cached_doc) is not None:
|
| 366 |
+
return rv
|
| 367 |
+
rv = self._cached_doc = self._make_doc()
|
| 368 |
+
return rv
|
| 369 |
+
|
| 370 |
+
@__doc__.setter
|
| 371 |
+
def __doc__(self, val):
|
| 372 |
+
self._orig_doc = val
|
| 373 |
+
self._cached_doc = None
|
| 374 |
+
|
| 375 |
+
@property
|
| 376 |
+
def __signature__(self):
|
| 377 |
+
if self._sig is None:
|
| 378 |
+
sig = inspect.signature(self.orig_func)
|
| 379 |
+
# `backend` is now a reserved argument used by dispatching.
|
| 380 |
+
# assert "backend" not in sig.parameters
|
| 381 |
+
if not any(
|
| 382 |
+
p.kind == inspect.Parameter.VAR_KEYWORD for p in sig.parameters.values()
|
| 383 |
+
):
|
| 384 |
+
sig = sig.replace(
|
| 385 |
+
parameters=[
|
| 386 |
+
*sig.parameters.values(),
|
| 387 |
+
inspect.Parameter(
|
| 388 |
+
"backend", inspect.Parameter.KEYWORD_ONLY, default=None
|
| 389 |
+
),
|
| 390 |
+
inspect.Parameter(
|
| 391 |
+
"backend_kwargs", inspect.Parameter.VAR_KEYWORD
|
| 392 |
+
),
|
| 393 |
+
]
|
| 394 |
+
)
|
| 395 |
+
else:
|
| 396 |
+
*parameters, var_keyword = sig.parameters.values()
|
| 397 |
+
sig = sig.replace(
|
| 398 |
+
parameters=[
|
| 399 |
+
*parameters,
|
| 400 |
+
inspect.Parameter(
|
| 401 |
+
"backend", inspect.Parameter.KEYWORD_ONLY, default=None
|
| 402 |
+
),
|
| 403 |
+
var_keyword,
|
| 404 |
+
]
|
| 405 |
+
)
|
| 406 |
+
self._sig = sig
|
| 407 |
+
return self._sig
|
| 408 |
+
|
| 409 |
+
def __call__(self, /, *args, backend=None, **kwargs):
|
| 410 |
+
if not backends:
|
| 411 |
+
# Fast path if no backends are installed
|
| 412 |
+
return self.orig_func(*args, **kwargs)
|
| 413 |
+
|
| 414 |
+
# Use `backend_name` in this function instead of `backend`
|
| 415 |
+
backend_name = backend
|
| 416 |
+
if backend_name is not None and backend_name not in backends:
|
| 417 |
+
raise ImportError(f"Unable to load backend: {backend_name}")
|
| 418 |
+
|
| 419 |
+
graphs_resolved = {}
|
| 420 |
+
for gname, pos in self.graphs.items():
|
| 421 |
+
if pos < len(args):
|
| 422 |
+
if gname in kwargs:
|
| 423 |
+
raise TypeError(f"{self.name}() got multiple values for {gname!r}")
|
| 424 |
+
val = args[pos]
|
| 425 |
+
elif gname in kwargs:
|
| 426 |
+
val = kwargs[gname]
|
| 427 |
+
elif gname not in self.optional_graphs:
|
| 428 |
+
raise TypeError(
|
| 429 |
+
f"{self.name}() missing required graph argument: {gname}"
|
| 430 |
+
)
|
| 431 |
+
else:
|
| 432 |
+
continue
|
| 433 |
+
if val is None:
|
| 434 |
+
if gname not in self.optional_graphs:
|
| 435 |
+
raise TypeError(
|
| 436 |
+
f"{self.name}() required graph argument {gname!r} is None; must be a graph"
|
| 437 |
+
)
|
| 438 |
+
else:
|
| 439 |
+
graphs_resolved[gname] = val
|
| 440 |
+
|
| 441 |
+
# Alternative to the above that does not check duplicated args or missing required graphs.
|
| 442 |
+
# graphs_resolved = {
|
| 443 |
+
# val
|
| 444 |
+
# for gname, pos in self.graphs.items()
|
| 445 |
+
# if (val := args[pos] if pos < len(args) else kwargs.get(gname)) is not None
|
| 446 |
+
# }
|
| 447 |
+
|
| 448 |
+
if self._is_testing and self._automatic_backends and backend_name is None:
|
| 449 |
+
# Special path if we are running networkx tests with a backend.
|
| 450 |
+
return self._convert_and_call_for_tests(
|
| 451 |
+
self._automatic_backends[0],
|
| 452 |
+
args,
|
| 453 |
+
kwargs,
|
| 454 |
+
fallback_to_nx=self._fallback_to_nx,
|
| 455 |
+
)
|
| 456 |
+
|
| 457 |
+
# Check if any graph comes from a backend
|
| 458 |
+
if self.list_graphs:
|
| 459 |
+
# Make sure we don't lose values by consuming an iterator
|
| 460 |
+
args = list(args)
|
| 461 |
+
for gname in self.list_graphs & graphs_resolved.keys():
|
| 462 |
+
val = list(graphs_resolved[gname])
|
| 463 |
+
graphs_resolved[gname] = val
|
| 464 |
+
if gname in kwargs:
|
| 465 |
+
kwargs[gname] = val
|
| 466 |
+
else:
|
| 467 |
+
args[self.graphs[gname]] = val
|
| 468 |
+
|
| 469 |
+
has_backends = any(
|
| 470 |
+
hasattr(g, "__networkx_backend__") or hasattr(g, "__networkx_plugin__")
|
| 471 |
+
if gname not in self.list_graphs
|
| 472 |
+
else any(
|
| 473 |
+
hasattr(g2, "__networkx_backend__")
|
| 474 |
+
or hasattr(g2, "__networkx_plugin__")
|
| 475 |
+
for g2 in g
|
| 476 |
+
)
|
| 477 |
+
for gname, g in graphs_resolved.items()
|
| 478 |
+
)
|
| 479 |
+
if has_backends:
|
| 480 |
+
graph_backend_names = {
|
| 481 |
+
getattr(
|
| 482 |
+
g,
|
| 483 |
+
"__networkx_backend__",
|
| 484 |
+
getattr(g, "__networkx_plugin__", "networkx"),
|
| 485 |
+
)
|
| 486 |
+
for gname, g in graphs_resolved.items()
|
| 487 |
+
if gname not in self.list_graphs
|
| 488 |
+
}
|
| 489 |
+
for gname in self.list_graphs & graphs_resolved.keys():
|
| 490 |
+
graph_backend_names.update(
|
| 491 |
+
getattr(
|
| 492 |
+
g,
|
| 493 |
+
"__networkx_backend__",
|
| 494 |
+
getattr(g, "__networkx_plugin__", "networkx"),
|
| 495 |
+
)
|
| 496 |
+
for g in graphs_resolved[gname]
|
| 497 |
+
)
|
| 498 |
+
else:
|
| 499 |
+
has_backends = any(
|
| 500 |
+
hasattr(g, "__networkx_backend__") or hasattr(g, "__networkx_plugin__")
|
| 501 |
+
for g in graphs_resolved.values()
|
| 502 |
+
)
|
| 503 |
+
if has_backends:
|
| 504 |
+
graph_backend_names = {
|
| 505 |
+
getattr(
|
| 506 |
+
g,
|
| 507 |
+
"__networkx_backend__",
|
| 508 |
+
getattr(g, "__networkx_plugin__", "networkx"),
|
| 509 |
+
)
|
| 510 |
+
for g in graphs_resolved.values()
|
| 511 |
+
}
|
| 512 |
+
if has_backends:
|
| 513 |
+
# Dispatchable graphs found! Dispatch to backend function.
|
| 514 |
+
# We don't handle calls with different backend graphs yet,
|
| 515 |
+
# but we may be able to convert additional networkx graphs.
|
| 516 |
+
backend_names = graph_backend_names - {"networkx"}
|
| 517 |
+
if len(backend_names) != 1:
|
| 518 |
+
# Future work: convert between backends and run if multiple backends found
|
| 519 |
+
raise TypeError(
|
| 520 |
+
f"{self.name}() graphs must all be from the same backend, found {backend_names}"
|
| 521 |
+
)
|
| 522 |
+
[graph_backend_name] = backend_names
|
| 523 |
+
if backend_name is not None and backend_name != graph_backend_name:
|
| 524 |
+
# Future work: convert between backends to `backend_name` backend
|
| 525 |
+
raise TypeError(
|
| 526 |
+
f"{self.name}() is unable to convert graph from backend {graph_backend_name!r} "
|
| 527 |
+
f"to the specified backend {backend_name!r}."
|
| 528 |
+
)
|
| 529 |
+
if graph_backend_name not in backends:
|
| 530 |
+
raise ImportError(f"Unable to load backend: {graph_backend_name}")
|
| 531 |
+
if (
|
| 532 |
+
"networkx" in graph_backend_names
|
| 533 |
+
and graph_backend_name not in self._automatic_backends
|
| 534 |
+
):
|
| 535 |
+
# Not configured to convert networkx graphs to this backend
|
| 536 |
+
raise TypeError(
|
| 537 |
+
f"Unable to convert inputs and run {self.name}. "
|
| 538 |
+
f"{self.name}() has networkx and {graph_backend_name} graphs, but NetworkX is not "
|
| 539 |
+
f"configured to automatically convert graphs from networkx to {graph_backend_name}."
|
| 540 |
+
)
|
| 541 |
+
backend = _load_backend(graph_backend_name)
|
| 542 |
+
if hasattr(backend, self.name):
|
| 543 |
+
if "networkx" in graph_backend_names:
|
| 544 |
+
# We need to convert networkx graphs to backend graphs
|
| 545 |
+
return self._convert_and_call(
|
| 546 |
+
graph_backend_name,
|
| 547 |
+
args,
|
| 548 |
+
kwargs,
|
| 549 |
+
fallback_to_nx=self._fallback_to_nx,
|
| 550 |
+
)
|
| 551 |
+
# All graphs are backend graphs--no need to convert!
|
| 552 |
+
return getattr(backend, self.name)(*args, **kwargs)
|
| 553 |
+
# Future work: try to convert and run with other backends in self._automatic_backends
|
| 554 |
+
raise NetworkXNotImplemented(
|
| 555 |
+
f"'{self.name}' not implemented by {graph_backend_name}"
|
| 556 |
+
)
|
| 557 |
+
|
| 558 |
+
# If backend was explicitly given by the user, so we need to use it no matter what
|
| 559 |
+
if backend_name is not None:
|
| 560 |
+
return self._convert_and_call(
|
| 561 |
+
backend_name, args, kwargs, fallback_to_nx=False
|
| 562 |
+
)
|
| 563 |
+
|
| 564 |
+
# Only networkx graphs; try to convert and run with a backend with automatic
|
| 565 |
+
# conversion, but don't do this by default for graph generators or loaders.
|
| 566 |
+
if self.graphs:
|
| 567 |
+
for backend_name in self._automatic_backends:
|
| 568 |
+
if self._can_backend_run(backend_name, *args, **kwargs):
|
| 569 |
+
return self._convert_and_call(
|
| 570 |
+
backend_name,
|
| 571 |
+
args,
|
| 572 |
+
kwargs,
|
| 573 |
+
fallback_to_nx=self._fallback_to_nx,
|
| 574 |
+
)
|
| 575 |
+
# Default: run with networkx on networkx inputs
|
| 576 |
+
return self.orig_func(*args, **kwargs)
|
| 577 |
+
|
| 578 |
+
def _can_backend_run(self, backend_name, /, *args, **kwargs):
|
| 579 |
+
"""Can the specified backend run this algorithms with these arguments?"""
|
| 580 |
+
backend = _load_backend(backend_name)
|
| 581 |
+
return hasattr(backend, self.name) and (
|
| 582 |
+
not hasattr(backend, "can_run") or backend.can_run(self.name, args, kwargs)
|
| 583 |
+
)
|
| 584 |
+
|
| 585 |
+
def _convert_arguments(self, backend_name, args, kwargs):
|
| 586 |
+
"""Convert graph arguments to the specified backend.
|
| 587 |
+
|
| 588 |
+
Returns
|
| 589 |
+
-------
|
| 590 |
+
args tuple and kwargs dict
|
| 591 |
+
"""
|
| 592 |
+
bound = self.__signature__.bind(*args, **kwargs)
|
| 593 |
+
bound.apply_defaults()
|
| 594 |
+
if not self.graphs:
|
| 595 |
+
bound_kwargs = bound.kwargs
|
| 596 |
+
del bound_kwargs["backend"]
|
| 597 |
+
return bound.args, bound_kwargs
|
| 598 |
+
# Convert graphs into backend graph-like object
|
| 599 |
+
# Include the edge and/or node labels if provided to the algorithm
|
| 600 |
+
preserve_edge_attrs = self.preserve_edge_attrs
|
| 601 |
+
edge_attrs = self.edge_attrs
|
| 602 |
+
if preserve_edge_attrs is False:
|
| 603 |
+
# e.g. `preserve_edge_attrs=False`
|
| 604 |
+
pass
|
| 605 |
+
elif preserve_edge_attrs is True:
|
| 606 |
+
# e.g. `preserve_edge_attrs=True`
|
| 607 |
+
edge_attrs = None
|
| 608 |
+
elif isinstance(preserve_edge_attrs, str):
|
| 609 |
+
if bound.arguments[preserve_edge_attrs] is True or callable(
|
| 610 |
+
bound.arguments[preserve_edge_attrs]
|
| 611 |
+
):
|
| 612 |
+
# e.g. `preserve_edge_attrs="attr"` and `func(attr=True)`
|
| 613 |
+
# e.g. `preserve_edge_attrs="attr"` and `func(attr=myfunc)`
|
| 614 |
+
preserve_edge_attrs = True
|
| 615 |
+
edge_attrs = None
|
| 616 |
+
elif bound.arguments[preserve_edge_attrs] is False and (
|
| 617 |
+
isinstance(edge_attrs, str)
|
| 618 |
+
and edge_attrs == preserve_edge_attrs
|
| 619 |
+
or isinstance(edge_attrs, dict)
|
| 620 |
+
and preserve_edge_attrs in edge_attrs
|
| 621 |
+
):
|
| 622 |
+
# e.g. `preserve_edge_attrs="attr"` and `func(attr=False)`
|
| 623 |
+
# Treat `False` argument as meaning "preserve_edge_data=False"
|
| 624 |
+
# and not `False` as the edge attribute to use.
|
| 625 |
+
preserve_edge_attrs = False
|
| 626 |
+
edge_attrs = None
|
| 627 |
+
else:
|
| 628 |
+
# e.g. `preserve_edge_attrs="attr"` and `func(attr="weight")`
|
| 629 |
+
preserve_edge_attrs = False
|
| 630 |
+
# Else: e.g. `preserve_edge_attrs={"G": {"weight": 1}}`
|
| 631 |
+
|
| 632 |
+
if edge_attrs is None:
|
| 633 |
+
# May have been set to None above b/c all attributes are preserved
|
| 634 |
+
pass
|
| 635 |
+
elif isinstance(edge_attrs, str):
|
| 636 |
+
if edge_attrs[0] == "[":
|
| 637 |
+
# e.g. `edge_attrs="[edge_attributes]"` (argument of list of attributes)
|
| 638 |
+
# e.g. `func(edge_attributes=["foo", "bar"])`
|
| 639 |
+
edge_attrs = {
|
| 640 |
+
edge_attr: 1 for edge_attr in bound.arguments[edge_attrs[1:-1]]
|
| 641 |
+
}
|
| 642 |
+
elif callable(bound.arguments[edge_attrs]):
|
| 643 |
+
# e.g. `edge_attrs="weight"` and `func(weight=myfunc)`
|
| 644 |
+
preserve_edge_attrs = True
|
| 645 |
+
edge_attrs = None
|
| 646 |
+
elif bound.arguments[edge_attrs] is not None:
|
| 647 |
+
# e.g. `edge_attrs="weight"` and `func(weight="foo")` (default of 1)
|
| 648 |
+
edge_attrs = {bound.arguments[edge_attrs]: 1}
|
| 649 |
+
elif self.name == "to_numpy_array" and hasattr(
|
| 650 |
+
bound.arguments["dtype"], "names"
|
| 651 |
+
):
|
| 652 |
+
# Custom handling: attributes may be obtained from `dtype`
|
| 653 |
+
edge_attrs = {
|
| 654 |
+
edge_attr: 1 for edge_attr in bound.arguments["dtype"].names
|
| 655 |
+
}
|
| 656 |
+
else:
|
| 657 |
+
# e.g. `edge_attrs="weight"` and `func(weight=None)`
|
| 658 |
+
edge_attrs = None
|
| 659 |
+
else:
|
| 660 |
+
# e.g. `edge_attrs={"attr": "default"}` and `func(attr="foo", default=7)`
|
| 661 |
+
# e.g. `edge_attrs={"attr": 0}` and `func(attr="foo")`
|
| 662 |
+
edge_attrs = {
|
| 663 |
+
edge_attr: bound.arguments.get(val, 1) if isinstance(val, str) else val
|
| 664 |
+
for key, val in edge_attrs.items()
|
| 665 |
+
if (edge_attr := bound.arguments[key]) is not None
|
| 666 |
+
}
|
| 667 |
+
|
| 668 |
+
preserve_node_attrs = self.preserve_node_attrs
|
| 669 |
+
node_attrs = self.node_attrs
|
| 670 |
+
if preserve_node_attrs is False:
|
| 671 |
+
# e.g. `preserve_node_attrs=False`
|
| 672 |
+
pass
|
| 673 |
+
elif preserve_node_attrs is True:
|
| 674 |
+
# e.g. `preserve_node_attrs=True`
|
| 675 |
+
node_attrs = None
|
| 676 |
+
elif isinstance(preserve_node_attrs, str):
|
| 677 |
+
if bound.arguments[preserve_node_attrs] is True or callable(
|
| 678 |
+
bound.arguments[preserve_node_attrs]
|
| 679 |
+
):
|
| 680 |
+
# e.g. `preserve_node_attrs="attr"` and `func(attr=True)`
|
| 681 |
+
# e.g. `preserve_node_attrs="attr"` and `func(attr=myfunc)`
|
| 682 |
+
preserve_node_attrs = True
|
| 683 |
+
node_attrs = None
|
| 684 |
+
elif bound.arguments[preserve_node_attrs] is False and (
|
| 685 |
+
isinstance(node_attrs, str)
|
| 686 |
+
and node_attrs == preserve_node_attrs
|
| 687 |
+
or isinstance(node_attrs, dict)
|
| 688 |
+
and preserve_node_attrs in node_attrs
|
| 689 |
+
):
|
| 690 |
+
# e.g. `preserve_node_attrs="attr"` and `func(attr=False)`
|
| 691 |
+
# Treat `False` argument as meaning "preserve_node_data=False"
|
| 692 |
+
# and not `False` as the node attribute to use. Is this used?
|
| 693 |
+
preserve_node_attrs = False
|
| 694 |
+
node_attrs = None
|
| 695 |
+
else:
|
| 696 |
+
# e.g. `preserve_node_attrs="attr"` and `func(attr="weight")`
|
| 697 |
+
preserve_node_attrs = False
|
| 698 |
+
# Else: e.g. `preserve_node_attrs={"G": {"pos": None}}`
|
| 699 |
+
|
| 700 |
+
if node_attrs is None:
|
| 701 |
+
# May have been set to None above b/c all attributes are preserved
|
| 702 |
+
pass
|
| 703 |
+
elif isinstance(node_attrs, str):
|
| 704 |
+
if node_attrs[0] == "[":
|
| 705 |
+
# e.g. `node_attrs="[node_attributes]"` (argument of list of attributes)
|
| 706 |
+
# e.g. `func(node_attributes=["foo", "bar"])`
|
| 707 |
+
node_attrs = {
|
| 708 |
+
node_attr: None for node_attr in bound.arguments[node_attrs[1:-1]]
|
| 709 |
+
}
|
| 710 |
+
elif callable(bound.arguments[node_attrs]):
|
| 711 |
+
# e.g. `node_attrs="weight"` and `func(weight=myfunc)`
|
| 712 |
+
preserve_node_attrs = True
|
| 713 |
+
node_attrs = None
|
| 714 |
+
elif bound.arguments[node_attrs] is not None:
|
| 715 |
+
# e.g. `node_attrs="weight"` and `func(weight="foo")`
|
| 716 |
+
node_attrs = {bound.arguments[node_attrs]: None}
|
| 717 |
+
else:
|
| 718 |
+
# e.g. `node_attrs="weight"` and `func(weight=None)`
|
| 719 |
+
node_attrs = None
|
| 720 |
+
else:
|
| 721 |
+
# e.g. `node_attrs={"attr": "default"}` and `func(attr="foo", default=7)`
|
| 722 |
+
# e.g. `node_attrs={"attr": 0}` and `func(attr="foo")`
|
| 723 |
+
node_attrs = {
|
| 724 |
+
node_attr: bound.arguments.get(val) if isinstance(val, str) else val
|
| 725 |
+
for key, val in node_attrs.items()
|
| 726 |
+
if (node_attr := bound.arguments[key]) is not None
|
| 727 |
+
}
|
| 728 |
+
|
| 729 |
+
preserve_graph_attrs = self.preserve_graph_attrs
|
| 730 |
+
|
| 731 |
+
# It should be safe to assume that we either have networkx graphs or backend graphs.
|
| 732 |
+
# Future work: allow conversions between backends.
|
| 733 |
+
backend = _load_backend(backend_name)
|
| 734 |
+
for gname in self.graphs:
|
| 735 |
+
if gname in self.list_graphs:
|
| 736 |
+
bound.arguments[gname] = [
|
| 737 |
+
backend.convert_from_nx(
|
| 738 |
+
g,
|
| 739 |
+
edge_attrs=edge_attrs,
|
| 740 |
+
node_attrs=node_attrs,
|
| 741 |
+
preserve_edge_attrs=preserve_edge_attrs,
|
| 742 |
+
preserve_node_attrs=preserve_node_attrs,
|
| 743 |
+
preserve_graph_attrs=preserve_graph_attrs,
|
| 744 |
+
name=self.name,
|
| 745 |
+
graph_name=gname,
|
| 746 |
+
)
|
| 747 |
+
if getattr(
|
| 748 |
+
g,
|
| 749 |
+
"__networkx_backend__",
|
| 750 |
+
getattr(g, "__networkx_plugin__", "networkx"),
|
| 751 |
+
)
|
| 752 |
+
== "networkx"
|
| 753 |
+
else g
|
| 754 |
+
for g in bound.arguments[gname]
|
| 755 |
+
]
|
| 756 |
+
else:
|
| 757 |
+
graph = bound.arguments[gname]
|
| 758 |
+
if graph is None:
|
| 759 |
+
if gname in self.optional_graphs:
|
| 760 |
+
continue
|
| 761 |
+
raise TypeError(
|
| 762 |
+
f"Missing required graph argument `{gname}` in {self.name} function"
|
| 763 |
+
)
|
| 764 |
+
if isinstance(preserve_edge_attrs, dict):
|
| 765 |
+
preserve_edges = False
|
| 766 |
+
edges = preserve_edge_attrs.get(gname, edge_attrs)
|
| 767 |
+
else:
|
| 768 |
+
preserve_edges = preserve_edge_attrs
|
| 769 |
+
edges = edge_attrs
|
| 770 |
+
if isinstance(preserve_node_attrs, dict):
|
| 771 |
+
preserve_nodes = False
|
| 772 |
+
nodes = preserve_node_attrs.get(gname, node_attrs)
|
| 773 |
+
else:
|
| 774 |
+
preserve_nodes = preserve_node_attrs
|
| 775 |
+
nodes = node_attrs
|
| 776 |
+
if isinstance(preserve_graph_attrs, set):
|
| 777 |
+
preserve_graph = gname in preserve_graph_attrs
|
| 778 |
+
else:
|
| 779 |
+
preserve_graph = preserve_graph_attrs
|
| 780 |
+
if (
|
| 781 |
+
getattr(
|
| 782 |
+
graph,
|
| 783 |
+
"__networkx_backend__",
|
| 784 |
+
getattr(graph, "__networkx_plugin__", "networkx"),
|
| 785 |
+
)
|
| 786 |
+
== "networkx"
|
| 787 |
+
):
|
| 788 |
+
bound.arguments[gname] = backend.convert_from_nx(
|
| 789 |
+
graph,
|
| 790 |
+
edge_attrs=edges,
|
| 791 |
+
node_attrs=nodes,
|
| 792 |
+
preserve_edge_attrs=preserve_edges,
|
| 793 |
+
preserve_node_attrs=preserve_nodes,
|
| 794 |
+
preserve_graph_attrs=preserve_graph,
|
| 795 |
+
name=self.name,
|
| 796 |
+
graph_name=gname,
|
| 797 |
+
)
|
| 798 |
+
bound_kwargs = bound.kwargs
|
| 799 |
+
del bound_kwargs["backend"]
|
| 800 |
+
return bound.args, bound_kwargs
|
| 801 |
+
|
| 802 |
+
def _convert_and_call(self, backend_name, args, kwargs, *, fallback_to_nx=False):
|
| 803 |
+
"""Call this dispatchable function with a backend, converting graphs if necessary."""
|
| 804 |
+
backend = _load_backend(backend_name)
|
| 805 |
+
if not self._can_backend_run(backend_name, *args, **kwargs):
|
| 806 |
+
if fallback_to_nx:
|
| 807 |
+
return self.orig_func(*args, **kwargs)
|
| 808 |
+
msg = f"'{self.name}' not implemented by {backend_name}"
|
| 809 |
+
if hasattr(backend, self.name):
|
| 810 |
+
msg += " with the given arguments"
|
| 811 |
+
raise RuntimeError(msg)
|
| 812 |
+
|
| 813 |
+
try:
|
| 814 |
+
converted_args, converted_kwargs = self._convert_arguments(
|
| 815 |
+
backend_name, args, kwargs
|
| 816 |
+
)
|
| 817 |
+
result = getattr(backend, self.name)(*converted_args, **converted_kwargs)
|
| 818 |
+
except (NotImplementedError, NetworkXNotImplemented) as exc:
|
| 819 |
+
if fallback_to_nx:
|
| 820 |
+
return self.orig_func(*args, **kwargs)
|
| 821 |
+
raise
|
| 822 |
+
|
| 823 |
+
return result
|
| 824 |
+
|
| 825 |
+
def _convert_and_call_for_tests(
|
| 826 |
+
self, backend_name, args, kwargs, *, fallback_to_nx=False
|
| 827 |
+
):
|
| 828 |
+
"""Call this dispatchable function with a backend; for use with testing."""
|
| 829 |
+
backend = _load_backend(backend_name)
|
| 830 |
+
if not self._can_backend_run(backend_name, *args, **kwargs):
|
| 831 |
+
if fallback_to_nx or not self.graphs:
|
| 832 |
+
return self.orig_func(*args, **kwargs)
|
| 833 |
+
|
| 834 |
+
import pytest
|
| 835 |
+
|
| 836 |
+
msg = f"'{self.name}' not implemented by {backend_name}"
|
| 837 |
+
if hasattr(backend, self.name):
|
| 838 |
+
msg += " with the given arguments"
|
| 839 |
+
pytest.xfail(msg)
|
| 840 |
+
|
| 841 |
+
try:
|
| 842 |
+
converted_args, converted_kwargs = self._convert_arguments(
|
| 843 |
+
backend_name, args, kwargs
|
| 844 |
+
)
|
| 845 |
+
result = getattr(backend, self.name)(*converted_args, **converted_kwargs)
|
| 846 |
+
except (NotImplementedError, NetworkXNotImplemented) as exc:
|
| 847 |
+
if fallback_to_nx:
|
| 848 |
+
return self.orig_func(*args, **kwargs)
|
| 849 |
+
import pytest
|
| 850 |
+
|
| 851 |
+
pytest.xfail(
|
| 852 |
+
exc.args[0] if exc.args else f"{self.name} raised {type(exc).__name__}"
|
| 853 |
+
)
|
| 854 |
+
|
| 855 |
+
if self.name in {
|
| 856 |
+
"edmonds_karp_core",
|
| 857 |
+
"barycenter",
|
| 858 |
+
"contracted_nodes",
|
| 859 |
+
"stochastic_graph",
|
| 860 |
+
"relabel_nodes",
|
| 861 |
+
}:
|
| 862 |
+
# Special-case algorithms that mutate input graphs
|
| 863 |
+
bound = self.__signature__.bind(*converted_args, **converted_kwargs)
|
| 864 |
+
bound.apply_defaults()
|
| 865 |
+
bound2 = self.__signature__.bind(*args, **kwargs)
|
| 866 |
+
bound2.apply_defaults()
|
| 867 |
+
if self.name == "edmonds_karp_core":
|
| 868 |
+
R1 = backend.convert_to_nx(bound.arguments["R"])
|
| 869 |
+
R2 = bound2.arguments["R"]
|
| 870 |
+
for k, v in R1.edges.items():
|
| 871 |
+
R2.edges[k]["flow"] = v["flow"]
|
| 872 |
+
elif self.name == "barycenter" and bound.arguments["attr"] is not None:
|
| 873 |
+
G1 = backend.convert_to_nx(bound.arguments["G"])
|
| 874 |
+
G2 = bound2.arguments["G"]
|
| 875 |
+
attr = bound.arguments["attr"]
|
| 876 |
+
for k, v in G1.nodes.items():
|
| 877 |
+
G2.nodes[k][attr] = v[attr]
|
| 878 |
+
elif self.name == "contracted_nodes" and not bound.arguments["copy"]:
|
| 879 |
+
# Edges and nodes changed; node "contraction" and edge "weight" attrs
|
| 880 |
+
G1 = backend.convert_to_nx(bound.arguments["G"])
|
| 881 |
+
G2 = bound2.arguments["G"]
|
| 882 |
+
G2.__dict__.update(G1.__dict__)
|
| 883 |
+
elif self.name == "stochastic_graph" and not bound.arguments["copy"]:
|
| 884 |
+
G1 = backend.convert_to_nx(bound.arguments["G"])
|
| 885 |
+
G2 = bound2.arguments["G"]
|
| 886 |
+
for k, v in G1.edges.items():
|
| 887 |
+
G2.edges[k]["weight"] = v["weight"]
|
| 888 |
+
elif self.name == "relabel_nodes" and not bound.arguments["copy"]:
|
| 889 |
+
G1 = backend.convert_to_nx(bound.arguments["G"])
|
| 890 |
+
G2 = bound2.arguments["G"]
|
| 891 |
+
if G1 is G2:
|
| 892 |
+
return G2
|
| 893 |
+
G2._node.clear()
|
| 894 |
+
G2._node.update(G1._node)
|
| 895 |
+
G2._adj.clear()
|
| 896 |
+
G2._adj.update(G1._adj)
|
| 897 |
+
if hasattr(G1, "_pred") and hasattr(G2, "_pred"):
|
| 898 |
+
G2._pred.clear()
|
| 899 |
+
G2._pred.update(G1._pred)
|
| 900 |
+
if hasattr(G1, "_succ") and hasattr(G2, "_succ"):
|
| 901 |
+
G2._succ.clear()
|
| 902 |
+
G2._succ.update(G1._succ)
|
| 903 |
+
return G2
|
| 904 |
+
|
| 905 |
+
return backend.convert_to_nx(result, name=self.name)
|
| 906 |
+
|
| 907 |
+
def _make_doc(self):
|
| 908 |
+
if not self.backends:
|
| 909 |
+
return self._orig_doc
|
| 910 |
+
lines = [
|
| 911 |
+
"Backends",
|
| 912 |
+
"--------",
|
| 913 |
+
]
|
| 914 |
+
for backend in sorted(self.backends):
|
| 915 |
+
info = backend_info[backend]
|
| 916 |
+
if "short_summary" in info:
|
| 917 |
+
lines.append(f"{backend} : {info['short_summary']}")
|
| 918 |
+
else:
|
| 919 |
+
lines.append(backend)
|
| 920 |
+
if "functions" not in info or self.name not in info["functions"]:
|
| 921 |
+
lines.append("")
|
| 922 |
+
continue
|
| 923 |
+
|
| 924 |
+
func_info = info["functions"][self.name]
|
| 925 |
+
if "extra_docstring" in func_info:
|
| 926 |
+
lines.extend(
|
| 927 |
+
f" {line}" if line else line
|
| 928 |
+
for line in func_info["extra_docstring"].split("\n")
|
| 929 |
+
)
|
| 930 |
+
add_gap = True
|
| 931 |
+
else:
|
| 932 |
+
add_gap = False
|
| 933 |
+
if "extra_parameters" in func_info:
|
| 934 |
+
if add_gap:
|
| 935 |
+
lines.append("")
|
| 936 |
+
lines.append(" Extra parameters:")
|
| 937 |
+
extra_parameters = func_info["extra_parameters"]
|
| 938 |
+
for param in sorted(extra_parameters):
|
| 939 |
+
lines.append(f" {param}")
|
| 940 |
+
if desc := extra_parameters[param]:
|
| 941 |
+
lines.append(f" {desc}")
|
| 942 |
+
lines.append("")
|
| 943 |
+
else:
|
| 944 |
+
lines.append("")
|
| 945 |
+
|
| 946 |
+
lines.pop() # Remove last empty line
|
| 947 |
+
to_add = "\n ".join(lines)
|
| 948 |
+
return f"{self._orig_doc.rstrip()}\n\n {to_add}"
|
| 949 |
+
|
| 950 |
+
def __reduce__(self):
|
| 951 |
+
"""Allow this object to be serialized with pickle.
|
| 952 |
+
|
| 953 |
+
This uses the global registry `_registered_algorithms` to deserialize.
|
| 954 |
+
"""
|
| 955 |
+
return _restore_dispatch, (self.name,)
|
| 956 |
+
|
| 957 |
+
|
| 958 |
+
def _restore_dispatch(name):
|
| 959 |
+
return _registered_algorithms[name]
|
| 960 |
+
|
| 961 |
+
|
| 962 |
+
if os.environ.get("_NETWORKX_BUILDING_DOCS_"):
|
| 963 |
+
# When building docs with Sphinx, use the original function with the
|
| 964 |
+
# dispatched __doc__, b/c Sphinx renders normal Python functions better.
|
| 965 |
+
# This doesn't show e.g. `*, backend=None, **backend_kwargs` in the
|
| 966 |
+
# signatures, which is probably okay. It does allow the docstring to be
|
| 967 |
+
# updated based on the installed backends.
|
| 968 |
+
_orig_dispatch = _dispatch
|
| 969 |
+
|
| 970 |
+
def _dispatch(func=None, **kwargs): # type: ignore[no-redef]
|
| 971 |
+
if func is None:
|
| 972 |
+
return partial(_dispatch, **kwargs)
|
| 973 |
+
dispatched_func = _orig_dispatch(func, **kwargs)
|
| 974 |
+
func.__doc__ = dispatched_func.__doc__
|
| 975 |
+
return func
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/mapped_queue.py
ADDED
|
@@ -0,0 +1,298 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Priority queue class with updatable priorities.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import heapq
|
| 5 |
+
|
| 6 |
+
__all__ = ["MappedQueue"]
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class _HeapElement:
|
| 10 |
+
"""This proxy class separates the heap element from its priority.
|
| 11 |
+
|
| 12 |
+
The idea is that using a 2-tuple (priority, element) works
|
| 13 |
+
for sorting, but not for dict lookup because priorities are
|
| 14 |
+
often floating point values so round-off can mess up equality.
|
| 15 |
+
|
| 16 |
+
So, we need inequalities to look at the priority (for sorting)
|
| 17 |
+
and equality (and hash) to look at the element to enable
|
| 18 |
+
updates to the priority.
|
| 19 |
+
|
| 20 |
+
Unfortunately, this class can be tricky to work with if you forget that
|
| 21 |
+
`__lt__` compares the priority while `__eq__` compares the element.
|
| 22 |
+
In `greedy_modularity_communities()` the following code is
|
| 23 |
+
used to check that two _HeapElements differ in either element or priority:
|
| 24 |
+
|
| 25 |
+
if d_oldmax != row_max or d_oldmax.priority != row_max.priority:
|
| 26 |
+
|
| 27 |
+
If the priorities are the same, this implementation uses the element
|
| 28 |
+
as a tiebreaker. This provides compatibility with older systems that
|
| 29 |
+
use tuples to combine priority and elements.
|
| 30 |
+
"""
|
| 31 |
+
|
| 32 |
+
__slots__ = ["priority", "element", "_hash"]
|
| 33 |
+
|
| 34 |
+
def __init__(self, priority, element):
|
| 35 |
+
self.priority = priority
|
| 36 |
+
self.element = element
|
| 37 |
+
self._hash = hash(element)
|
| 38 |
+
|
| 39 |
+
def __lt__(self, other):
|
| 40 |
+
try:
|
| 41 |
+
other_priority = other.priority
|
| 42 |
+
except AttributeError:
|
| 43 |
+
return self.priority < other
|
| 44 |
+
# assume comparing to another _HeapElement
|
| 45 |
+
if self.priority == other_priority:
|
| 46 |
+
try:
|
| 47 |
+
return self.element < other.element
|
| 48 |
+
except TypeError as err:
|
| 49 |
+
raise TypeError(
|
| 50 |
+
"Consider using a tuple, with a priority value that can be compared."
|
| 51 |
+
)
|
| 52 |
+
return self.priority < other_priority
|
| 53 |
+
|
| 54 |
+
def __gt__(self, other):
|
| 55 |
+
try:
|
| 56 |
+
other_priority = other.priority
|
| 57 |
+
except AttributeError:
|
| 58 |
+
return self.priority > other
|
| 59 |
+
# assume comparing to another _HeapElement
|
| 60 |
+
if self.priority == other_priority:
|
| 61 |
+
try:
|
| 62 |
+
return self.element > other.element
|
| 63 |
+
except TypeError as err:
|
| 64 |
+
raise TypeError(
|
| 65 |
+
"Consider using a tuple, with a priority value that can be compared."
|
| 66 |
+
)
|
| 67 |
+
return self.priority > other_priority
|
| 68 |
+
|
| 69 |
+
def __eq__(self, other):
|
| 70 |
+
try:
|
| 71 |
+
return self.element == other.element
|
| 72 |
+
except AttributeError:
|
| 73 |
+
return self.element == other
|
| 74 |
+
|
| 75 |
+
def __hash__(self):
|
| 76 |
+
return self._hash
|
| 77 |
+
|
| 78 |
+
def __getitem__(self, indx):
|
| 79 |
+
return self.priority if indx == 0 else self.element[indx - 1]
|
| 80 |
+
|
| 81 |
+
def __iter__(self):
|
| 82 |
+
yield self.priority
|
| 83 |
+
try:
|
| 84 |
+
yield from self.element
|
| 85 |
+
except TypeError:
|
| 86 |
+
yield self.element
|
| 87 |
+
|
| 88 |
+
def __repr__(self):
|
| 89 |
+
return f"_HeapElement({self.priority}, {self.element})"
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
class MappedQueue:
|
| 93 |
+
"""The MappedQueue class implements a min-heap with removal and update-priority.
|
| 94 |
+
|
| 95 |
+
The min heap uses heapq as well as custom written _siftup and _siftdown
|
| 96 |
+
methods to allow the heap positions to be tracked by an additional dict
|
| 97 |
+
keyed by element to position. The smallest element can be popped in O(1) time,
|
| 98 |
+
new elements can be pushed in O(log n) time, and any element can be removed
|
| 99 |
+
or updated in O(log n) time. The queue cannot contain duplicate elements
|
| 100 |
+
and an attempt to push an element already in the queue will have no effect.
|
| 101 |
+
|
| 102 |
+
MappedQueue complements the heapq package from the python standard
|
| 103 |
+
library. While MappedQueue is designed for maximum compatibility with
|
| 104 |
+
heapq, it adds element removal, lookup, and priority update.
|
| 105 |
+
|
| 106 |
+
Parameters
|
| 107 |
+
----------
|
| 108 |
+
data : dict or iterable
|
| 109 |
+
|
| 110 |
+
Examples
|
| 111 |
+
--------
|
| 112 |
+
|
| 113 |
+
A `MappedQueue` can be created empty, or optionally, given a dictionary
|
| 114 |
+
of initial elements and priorities. The methods `push`, `pop`,
|
| 115 |
+
`remove`, and `update` operate on the queue.
|
| 116 |
+
|
| 117 |
+
>>> colors_nm = {'red':665, 'blue': 470, 'green': 550}
|
| 118 |
+
>>> q = MappedQueue(colors_nm)
|
| 119 |
+
>>> q.remove('red')
|
| 120 |
+
>>> q.update('green', 'violet', 400)
|
| 121 |
+
>>> q.push('indigo', 425)
|
| 122 |
+
True
|
| 123 |
+
>>> [q.pop().element for i in range(len(q.heap))]
|
| 124 |
+
['violet', 'indigo', 'blue']
|
| 125 |
+
|
| 126 |
+
A `MappedQueue` can also be initialized with a list or other iterable. The priority is assumed
|
| 127 |
+
to be the sort order of the items in the list.
|
| 128 |
+
|
| 129 |
+
>>> q = MappedQueue([916, 50, 4609, 493, 237])
|
| 130 |
+
>>> q.remove(493)
|
| 131 |
+
>>> q.update(237, 1117)
|
| 132 |
+
>>> [q.pop() for i in range(len(q.heap))]
|
| 133 |
+
[50, 916, 1117, 4609]
|
| 134 |
+
|
| 135 |
+
An exception is raised if the elements are not comparable.
|
| 136 |
+
|
| 137 |
+
>>> q = MappedQueue([100, 'a'])
|
| 138 |
+
Traceback (most recent call last):
|
| 139 |
+
...
|
| 140 |
+
TypeError: '<' not supported between instances of 'int' and 'str'
|
| 141 |
+
|
| 142 |
+
To avoid the exception, use a dictionary to assign priorities to the elements.
|
| 143 |
+
|
| 144 |
+
>>> q = MappedQueue({100: 0, 'a': 1 })
|
| 145 |
+
|
| 146 |
+
References
|
| 147 |
+
----------
|
| 148 |
+
.. [1] Cormen, T. H., Leiserson, C. E., Rivest, R. L., & Stein, C. (2001).
|
| 149 |
+
Introduction to algorithms second edition.
|
| 150 |
+
.. [2] Knuth, D. E. (1997). The art of computer programming (Vol. 3).
|
| 151 |
+
Pearson Education.
|
| 152 |
+
"""
|
| 153 |
+
|
| 154 |
+
def __init__(self, data=None):
    """Build the queue, optionally seeded with initial elements.

    Parameters
    ----------
    data : dict or iterable, optional
        A dict maps elements to priorities; any other iterable is taken
        as pre-prioritized elements (their sort order is the priority).
        ``None`` creates an empty queue.
    """
    if data is None:
        initial = []
    elif isinstance(data, dict):
        # Wrap each (element, priority) pair so unlike elements stay comparable.
        initial = [_HeapElement(priority, elt) for elt, priority in data.items()]
    else:
        initial = list(data)
    self.heap = initial
    self.position = {}
    self._heapify()
|
| 164 |
+
|
| 165 |
+
def _heapify(self):
    """Re-establish the heap invariant and rebuild the element->index map."""
    heapq.heapify(self.heap)
    # Positions are recomputed wholesale from the freshly heapified list.
    self.position = dict(zip(self.heap, range(len(self.heap))))
    if len(self.heap) != len(self.position):
        raise AssertionError("Heap contains duplicate elements")
|
| 171 |
+
|
| 172 |
+
def __len__(self):
    """Number of elements currently queued."""
    return len(self.heap)
|
| 174 |
+
|
| 175 |
+
def push(self, elt, priority=None):
    """Add an element to the queue.

    Returns ``False`` (and does nothing) if the element is already queued,
    ``True`` otherwise.  A non-None *priority* wraps *elt* in a
    ``_HeapElement`` so unlike elements remain comparable.
    """
    if priority is not None:
        elt = _HeapElement(priority, elt)
    if elt in self.position:
        # Duplicate keys are refused rather than updated.
        return False
    # Append at the bottom of the heap, then bubble toward the root.
    slot = len(self.heap)
    self.heap.append(elt)
    self.position[elt] = slot
    self._siftdown(0, slot)
    return True
|
| 189 |
+
|
| 190 |
+
def pop(self):
    """Remove and return the smallest element in the queue.

    Raises IndexError on an empty queue (via the root access).
    """
    smallest = self.heap[0]
    del self.position[smallest]
    if len(self.heap) == 1:
        # The root is the only element: just drop it.
        self.heap.pop()
        return smallest
    # Promote the last leaf to the root, then restore the invariant.
    tail = self.heap.pop()
    self.heap[0] = tail
    self.position[tail] = 0
    self._siftup(0)
    return smallest
|
| 207 |
+
|
| 208 |
+
def update(self, elt, new, priority=None):
    """Replace queued element *elt* with *new* (optionally wrapped with *priority*).

    Raises KeyError if *elt* is not queued.
    """
    if priority is not None:
        new = _HeapElement(priority, new)
    # Swap the mapping in one step; KeyError surfaces before any mutation.
    pos = self.position.pop(elt)
    self.heap[pos] = new
    self.position[new] = pos
    # The replacement may violate the invariant in either direction;
    # _siftup handles both (it sifts down, then bubbles up).
    self._siftup(pos)
|
| 219 |
+
|
| 220 |
+
def remove(self, elt):
    """Remove an element from the queue.

    Raises
    ------
    KeyError
        If *elt* is not in the queue.

    Notes
    -----
    The original wrapped the lookup in ``try/except KeyError: raise``,
    which only re-raises the same exception — removed as a no-op, and the
    separate lookup + delete collapsed into a single ``dict.pop``.
    """
    # KeyError propagates to the caller if elt is not queued.
    pos = self.position.pop(elt)
    # If elt occupies the last slot, no restructuring is needed.
    if pos == len(self.heap) - 1:
        self.heap.pop()
        return
    # Move the last element into the vacated slot and restore the invariant.
    last = self.heap.pop()
    self.heap[pos] = last
    self.position[last] = pos
    self._siftup(pos)
|
| 239 |
+
|
| 240 |
+
def _siftup(self, pos):
    """Move smaller child up until hitting a leaf.

    Built to mimic code for heapq._siftup
    only updating position dict too.
    """
    # Local aliases avoid repeated attribute lookups in the loops below.
    heap, position = self.heap, self.position
    end_pos = len(heap)
    startpos = pos  # NOTE(review): unused; apparently kept to mirror heapq._siftup.
    newitem = heap[pos]
    # Shift up the smaller child until hitting a leaf
    child_pos = (pos << 1) + 1  # start with leftmost child position
    while child_pos < end_pos:
        # Set child_pos to index of smaller child.
        child = heap[child_pos]
        right_pos = child_pos + 1
        if right_pos < end_pos:
            right = heap[right_pos]
            # Prefer the right child when it is <= the left child.
            if not child < right:
                child = right
                child_pos = right_pos
        # Move the smaller child up.
        heap[pos] = child
        position[child] = pos
        pos = child_pos
        child_pos = (pos << 1) + 1
    # pos is a leaf position. Put newitem there, and bubble it up
    # to its final resting place (by sifting its parents down).
    while pos > 0:
        parent_pos = (pos - 1) >> 1
        parent = heap[parent_pos]
        if not newitem < parent:
            break
        # Parent is larger: pull it down a level, tracking its new index.
        heap[pos] = parent
        position[parent] = pos
        pos = parent_pos
    heap[pos] = newitem
    position[newitem] = pos
|
| 278 |
+
|
| 279 |
+
def _siftdown(self, start_pos, pos):
    """Restore invariant. keep swapping with parent until smaller.

    Built to mimic code for heapq._siftdown
    only updating position dict too.
    """
    # Local aliases avoid repeated attribute lookups in the loop below.
    heap, position = self.heap, self.position
    newitem = heap[pos]
    # Follow the path to the root, moving parents down until finding a place
    # newitem fits.
    while pos > start_pos:
        parent_pos = (pos - 1) >> 1
        parent = heap[parent_pos]
        if not newitem < parent:
            break
        # Parent is larger: pull it down one level (and track its new slot).
        heap[pos] = parent
        position[parent] = pos
        pos = parent_pos
    heap[pos] = newitem
    position[newitem] = pos
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/tests/test_decorators.py
ADDED
|
@@ -0,0 +1,491 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import pathlib
|
| 3 |
+
import random
|
| 4 |
+
import tempfile
|
| 5 |
+
|
| 6 |
+
import pytest
|
| 7 |
+
|
| 8 |
+
import networkx as nx
|
| 9 |
+
from networkx.utils.decorators import (
|
| 10 |
+
argmap,
|
| 11 |
+
not_implemented_for,
|
| 12 |
+
np_random_state,
|
| 13 |
+
open_file,
|
| 14 |
+
py_random_state,
|
| 15 |
+
)
|
| 16 |
+
from networkx.utils.misc import PythonRandomInterface
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def test_not_implemented_decorator():
    """not_implemented_for: words in one call are ANDed; stacked decorators are ORed."""

    @not_implemented_for("directed")
    def no_directed(G):
        pass

    no_directed(nx.Graph())
    with pytest.raises(nx.NetworkXNotImplemented):
        no_directed(nx.DiGraph())

    @not_implemented_for("undirected")
    def no_undirected(G):
        pass

    no_undirected(nx.DiGraph())
    with pytest.raises(nx.NetworkXNotImplemented):
        no_undirected(nx.Graph())

    @not_implemented_for("multigraph")
    def no_multigraph(G):
        pass

    no_multigraph(nx.Graph())
    with pytest.raises(nx.NetworkXNotImplemented):
        no_multigraph(nx.MultiGraph())

    @not_implemented_for("graph")
    def no_plain_graph(G):
        pass

    no_plain_graph(nx.MultiGraph())
    with pytest.raises(nx.NetworkXNotImplemented):
        no_plain_graph(nx.Graph())

    # Multiple words in one call are ANDed: only MultiDiGraph is banned.
    @not_implemented_for("directed", "multigraph")
    def no_multidigraph(G):
        pass

    no_multidigraph(nx.Graph())
    no_multidigraph(nx.DiGraph())
    no_multidigraph(nx.MultiGraph())
    with pytest.raises(nx.NetworkXNotImplemented):
        no_multidigraph(nx.MultiDiGraph())

    # Stacked decorators are ORed: only plain Graph is allowed.
    @not_implemented_for("directed")
    @not_implemented_for("multigraph")
    def graph_only(G):
        pass

    graph_only(nx.Graph())
    with pytest.raises(nx.NetworkXNotImplemented):
        graph_only(nx.DiGraph())
    with pytest.raises(nx.NetworkXNotImplemented):
        graph_only(nx.MultiGraph())
    with pytest.raises(nx.NetworkXNotImplemented):
        graph_only(nx.MultiDiGraph())

    # Contradictory specifications are rejected at decoration time.
    with pytest.raises(ValueError):
        not_implemented_for("directed", "undirected")

    with pytest.raises(ValueError):
        not_implemented_for("multigraph", "graph")
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def test_not_implemented_decorator_key():
    """An unrecognized graph-kind word fails with KeyError."""
    with pytest.raises(KeyError):

        @not_implemented_for("foo")
        def bad_kind(G):
            pass

        bad_kind(nx.Graph())
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def test_not_implemented_decorator_raise():
    """Calling a decorated function on a banned graph type raises."""
    with pytest.raises(nx.NetworkXNotImplemented):

        @not_implemented_for("graph")
        def graph_banned(G):
            pass

        graph_banned(nx.Graph())
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
class TestOpenFileDecorator:
    """Exercise ``open_file`` with strings, pathlib.Path, and open file objects.

    ``open_file`` should open string/Path arguments itself, but pass
    already-open file objects through untouched and leave them open.
    """

    def setup_method(self):
        # A real temp file backs each test; delete=False so the path can be
        # reopened independently of self.fobj.
        self.text = ["Blah... ", "BLAH ", "BLAH!!!!"]
        self.fobj = tempfile.NamedTemporaryFile("wb+", delete=False)
        self.name = self.fobj.name

    def teardown_method(self):
        self.fobj.close()
        os.unlink(self.name)

    def write(self, path):
        # Undecorated helper: expects an already-open binary file object.
        for text in self.text:
            path.write(text.encode("ascii"))

    @open_file(1, "r")
    def read(self, path):
        # Returns the file's first line (all text is written on one line).
        return path.readlines()[0]

    @staticmethod
    @open_file(0, "wb")
    def writer_arg0(path):
        # Static method: the file argument is at position 0.
        path.write(b"demo")

    @open_file(1, "wb+")
    def writer_arg1(self, path):
        # Instance method: the file argument is at position 1 (after self).
        self.write(path)

    @open_file(2, "wb")
    def writer_arg2default(self, x, path=None):
        # Positional argument with a default: the decorator must cope with
        # the argument being absent or None.
        if path is None:
            with tempfile.NamedTemporaryFile("wb+") as fh:
                self.write(fh)
        else:
            self.write(path)

    @open_file(4, "wb")
    def writer_arg4default(self, x, y, other="hello", path=None, **kwargs):
        # Deeper positional index combined with defaults and **kwargs.
        if path is None:
            with tempfile.NamedTemporaryFile("wb+") as fh:
                self.write(fh)
        else:
            self.write(path)

    @open_file("path", "wb")
    def writer_kwarg(self, **kwargs):
        # Keyword-only usage: the file argument is addressed by name.
        path = kwargs.get("path", None)
        if path is None:
            with tempfile.NamedTemporaryFile("wb+") as fh:
                self.write(fh)
        else:
            self.write(path)

    def test_writer_arg0_str(self):
        self.writer_arg0(self.name)

    def test_writer_arg0_fobj(self):
        self.writer_arg0(self.fobj)

    def test_writer_arg0_pathlib(self):
        self.writer_arg0(pathlib.Path(self.name))

    def test_writer_arg1_str(self):
        self.writer_arg1(self.name)
        assert self.read(self.name) == "".join(self.text)

    def test_writer_arg1_fobj(self):
        # A caller-supplied file object must NOT be closed by the decorator.
        self.writer_arg1(self.fobj)
        assert not self.fobj.closed
        self.fobj.close()
        assert self.read(self.name) == "".join(self.text)

    def test_writer_arg2default_str(self):
        self.writer_arg2default(0, path=None)
        self.writer_arg2default(0, path=self.name)
        assert self.read(self.name) == "".join(self.text)

    def test_writer_arg2default_fobj(self):
        self.writer_arg2default(0, path=self.fobj)
        assert not self.fobj.closed
        self.fobj.close()
        assert self.read(self.name) == "".join(self.text)

    def test_writer_arg2default_fobj_path_none(self):
        self.writer_arg2default(0, path=None)

    def test_writer_arg4default_fobj(self):
        self.writer_arg4default(0, 1, dog="dog", other="other")
        self.writer_arg4default(0, 1, dog="dog", other="other", path=self.name)
        assert self.read(self.name) == "".join(self.text)

    def test_writer_kwarg_str(self):
        self.writer_kwarg(path=self.name)
        assert self.read(self.name) == "".join(self.text)

    def test_writer_kwarg_fobj(self):
        self.writer_kwarg(path=self.fobj)
        self.fobj.close()
        assert self.read(self.name) == "".join(self.text)

    def test_writer_kwarg_path_none(self):
        self.writer_kwarg(path=None)
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
class TestRandomState:
    """Exercise np_random_state / py_random_state seed normalization.

    Fixes several always-true assertions of the form ``assert a, b`` — which
    assert only the truthiness of ``a``, with ``b`` as the failure message —
    by comparing against the expected value wherever the two random streams
    provably agree.
    """

    @classmethod
    def setup_class(cls):
        global np
        np = pytest.importorskip("numpy")

    @np_random_state(1)
    def instantiate_np_random_state(self, random_state):
        # The decorator must hand us a numpy RandomState regardless of seed form.
        assert isinstance(random_state, np.random.RandomState)
        return random_state.random_sample()

    @py_random_state(1)
    def instantiate_py_random_state(self, random_state):
        # The decorator must hand us a Python-style RNG.
        assert isinstance(random_state, (random.Random, PythonRandomInterface))
        return random_state.random()

    def test_random_state_None(self):
        # seed=None means "use the global RNG": draws must track it exactly.
        np.random.seed(42)
        rv = np.random.random_sample()
        np.random.seed(42)
        assert rv == self.instantiate_np_random_state(None)

        random.seed(42)
        rv = random.random()
        random.seed(42)
        assert rv == self.instantiate_py_random_state(None)

    def test_random_state_np_random(self):
        # seed=np.random means "use numpy's global RNG" for both decorators.
        np.random.seed(42)
        rv = np.random.random_sample()
        np.random.seed(42)
        assert rv == self.instantiate_np_random_state(np.random)
        np.random.seed(42)
        assert rv == self.instantiate_py_random_state(np.random)

    def test_random_state_int(self):
        np.random.seed(42)
        np_rv = np.random.random_sample()
        random.seed(42)
        py_rv = random.random()

        np.random.seed(42)
        seed = 1
        rval = self.instantiate_np_random_state(seed)
        rval_expected = np.random.RandomState(seed).rand()
        # An int seed creates a fresh RandomState(seed); the draw is exact.
        assert rval == rval_expected
        # test that global seed wasn't changed in function
        assert np_rv == np.random.random_sample()

        random.seed(42)
        rval = self.instantiate_py_random_state(seed)
        rval_expected = random.Random(seed).random()
        # An int seed creates a fresh random.Random(seed); the draw is exact.
        assert rval == rval_expected
        # test that global seed wasn't changed in function
        assert py_rv == random.random()

    def test_random_state_np_random_RandomState(self):
        np.random.seed(42)
        np_rv = np.random.random_sample()

        np.random.seed(42)
        seed = 1
        # NOTE(review): rng is unused — was it meant to be passed to the
        # instantiate_* calls below instead of the bare int seed?  Confirm.
        rng = np.random.RandomState(seed)
        rval = self.instantiate_np_random_state(seed)
        rval_expected = np.random.RandomState(seed).rand()
        assert rval == rval_expected

        rval = self.instantiate_py_random_state(seed)
        rval_expected = np.random.RandomState(seed).rand()
        # NOTE(review): an int seed yields random.Random(seed), whose stream
        # differs from numpy's, so only truthiness can be asserted here;
        # kept as in the original pending a rework of the expected value.
        assert rval, rval_expected
        # test that global seed wasn't changed in function
        assert np_rv == np.random.random_sample()

    def test_random_state_py_random(self):
        seed = 1
        rng = random.Random(seed)
        rv = self.instantiate_py_random_state(rng)
        # py_random_state passes a random.Random instance through unchanged,
        # so the first draw matches a fresh Random(seed) exactly.
        assert rv == random.Random(seed).random()

        # A Python Random cannot be converted into a numpy RandomState.
        pytest.raises(ValueError, self.instantiate_np_random_state, rng)
|
| 287 |
+
|
| 288 |
+
|
| 289 |
+
def test_random_state_string_arg_index():
    """np_random_state rejects a string that is not a real argument name."""
    with pytest.raises(nx.NetworkXError):

        @np_random_state("a")
        def build_rng(rs):
            pass

        build_rng(1)
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
def test_py_random_state_string_arg_index():
    """py_random_state rejects a string that is not a real argument name."""
    with pytest.raises(nx.NetworkXError):

        @py_random_state("a")
        def build_rng(rs):
            pass

        build_rng(1)
|
| 307 |
+
|
| 308 |
+
|
| 309 |
+
def test_random_state_invalid_arg_index():
    """np_random_state rejects an argument index past the signature's end."""
    with pytest.raises(nx.NetworkXError):

        @np_random_state(2)
        def build_rng(rs):
            pass

        build_rng(1)
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
def test_py_random_state_invalid_arg_index():
    """py_random_state rejects an argument index past the signature's end."""
    with pytest.raises(nx.NetworkXError):

        @py_random_state(2)
        def build_rng(rs):
            pass

        build_rng(1)
|
| 327 |
+
|
| 328 |
+
|
| 329 |
+
class TestArgmap:
    """Exercise the ``argmap`` decorator's compilation and dispatch machinery."""

    class ArgmapError(RuntimeError):
        # Marker exception for "this mapper must never actually run".
        pass

    def test_trivial_function(self):
        def do_not_call(x):
            # NOTE(review): the bare name ArgmapError would NameError here
            # (it is only reachable as TestArgmap.ArgmapError) — harmless,
            # since this function must never be invoked.
            raise ArgmapError("do not call this function")

        # A mapper with no argument indices is never applied.
        @argmap(do_not_call)
        def trivial_argmap():
            return 1

        assert trivial_argmap() == 1

    def test_trivial_iterator(self):
        def do_not_call(x):
            raise ArgmapError("do not call this function")

        # Same as above, but the wrapped function is a generator.
        @argmap(do_not_call)
        def trivial_argmap():
            yield from (1, 2, 3)

        assert tuple(trivial_argmap()) == (1, 2, 3)

    def test_contextmanager(self):
        container = []

        def contextmanager(x):
            nonlocal container
            # Returns (value, exit-callback); try_finally runs the callbacks.
            return x, lambda: container.append(x)

        @argmap(contextmanager, 0, 1, 2, try_finally=True)
        def foo(x, y, z):
            return x, y, z

        x, y, z = foo("a", "b", "c")

        # context exits are called in reverse
        assert container == ["c", "b", "a"]

    def test_tryfinally_generator(self):
        container = []

        def singleton(x):
            return (x,)

        # try_finally cannot wrap a generator function: rejected eagerly.
        with pytest.raises(nx.NetworkXError):

            @argmap(singleton, 0, 1, 2, try_finally=True)
            def foo(x, y, z):
                yield from (x, y, z)

        @argmap(singleton, 0, 1, 2)
        def foo(x, y, z):
            return x + y + z

        q = foo("a", "b", "c")

        # Each single-char string was wrapped in a 1-tuple; + concatenates them.
        assert q == ("a", "b", "c")

    def test_actual_vararg(self):
        # Index 4 reaches into the *args tail.
        @argmap(lambda x: -x, 4)
        def foo(x, y, *args):
            return (x, y) + tuple(args)

        assert foo(1, 2, 3, 4, 5, 6) == (1, 2, 3, 4, -5, 6)

    def test_signature_destroying_intermediate_decorator(self):
        def add_one_to_first_bad_decorator(f):
            """Bad because it doesn't wrap the f signature (clobbers it)"""

            def decorated(a, *args, **kwargs):
                return f(a + 1, *args, **kwargs)

            return decorated

        add_two_to_second = argmap(lambda b: b + 2, 1)

        # argmap must still find argument 1 through the clobbered signature.
        @add_two_to_second
        @add_one_to_first_bad_decorator
        def add_one_and_two(a, b):
            return a, b

        assert add_one_and_two(5, 5) == (6, 7)

    def test_actual_kwarg(self):
        # The argument may be named instead of indexed (keyword-only here).
        @argmap(lambda x: -x, "arg")
        def foo(*, arg):
            return arg

        assert foo(arg=3) == -3

    def test_nested_tuple(self):
        def xform(x, y):
            u, v = y
            return x + u + v, (x + u, x + v)

        # we're testing args and kwargs here, too
        @argmap(xform, (0, ("t", 2)))
        def foo(a, *args, **kwargs):
            return a, args, kwargs

        a, args, kwargs = foo(1, 2, 3, t=4)

        assert a == 1 + 4 + 3
        assert args == (2, 1 + 3)
        assert kwargs == {"t": 1 + 4}

    def test_flatten(self):
        # Arbitrarily nested empties flatten to nothing.
        assert tuple(argmap._flatten([[[[[], []], [], []], [], [], []]], set())) == ()

        rlist = ["a", ["b", "c"], [["d"], "e"], "f"]
        assert "".join(argmap._flatten(rlist, set())) == "abcdef"

    def test_indent(self):
        # _indent adds one space of indentation per open block.
        code = "\n".join(
            argmap._indent(
                *[
                    "try:",
                    "try:",
                    "pass#",
                    "finally:",
                    "pass#",
                    "#",
                    "finally:",
                    "pass#",
                ]
            )
        )
        assert (
            code
            == """try:
 try:
  pass#
 finally:
  pass#
 #
finally:
 pass#"""
        )

    def test_immediate_raise(self):
        @not_implemented_for("directed")
        def yield_nodes(G):
            yield from G

        G = nx.Graph([(1, 2)])
        D = nx.DiGraph()

        # test first call (argmap is compiled and executed)
        with pytest.raises(nx.NetworkXNotImplemented):
            node_iter = yield_nodes(D)

        # test second call (argmap is only executed)
        with pytest.raises(nx.NetworkXNotImplemented):
            node_iter = yield_nodes(D)

        # ensure that generators still make generators
        node_iter = yield_nodes(G)
        next(node_iter)
        next(node_iter)
        with pytest.raises(StopIteration):
            next(node_iter)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/tests/test_heaps.py
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
import networkx as nx
|
| 4 |
+
from networkx.utils import BinaryHeap, PairingHeap
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class X:
    """Hashable sentinel whose every comparison blows up.

    Used as a heap key to verify the heap implementations never compare
    *keys*, only priorities.  NOTE(review): ``raise self is other`` raises a
    bool, which itself fails with "exceptions must derive from
    BaseException" — so any comparison errors out either way, which appears
    to be the intent; confirm before "fixing".
    """

    def __eq__(self, other):
        raise self is other

    def __ne__(self, other):
        raise self is not other

    def __lt__(self, other):
        raise TypeError("cannot compare")

    def __le__(self, other):
        raise TypeError("cannot compare")

    def __ge__(self, other):
        raise TypeError("cannot compare")

    def __gt__(self, other):
        raise TypeError("cannot compare")

    def __hash__(self):
        # Identity hash keeps the object usable as a dict/heap key.
        return hash(id(self))
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
# Shared uncomparable-but-hashable key (all comparisons on X raise).
x = X()


# Operation script applied to each heap class by _test_heap_class: each tuple
# is (method_name, *args, expected); an expected value of nx.NetworkXError
# means the call must raise that exception instead of returning.
data = [  # min should not invent an element.
    ("min", nx.NetworkXError),
    # Popping an empty heap should fail.
    ("pop", nx.NetworkXError),
    # Getting nonexisting elements should return None.
    ("get", 0, None),
    ("get", x, None),
    ("get", None, None),
    # Inserting a new key should succeed.
    ("insert", x, 1, True),
    ("get", x, 1),
    ("min", (x, 1)),
    # min should not pop the top element.
    ("min", (x, 1)),
    # Inserting a new key of different type should succeed.
    ("insert", 1, -2.0, True),
    # int and float values should interop.
    ("min", (1, -2.0)),
    # pop removes minimum-valued element.
    ("insert", 3, -(10**100), True),
    ("insert", 4, 5, True),
    ("pop", (3, -(10**100))),
    ("pop", (1, -2.0)),
    # Decrease-insert should succeed.
    ("insert", 4, -50, True),
    ("insert", 4, -60, False, True),
    # Decrease-insert should not create duplicate keys.
    ("pop", (4, -60)),
    ("pop", (x, 1)),
    # Popping all elements should empty the heap.
    ("min", nx.NetworkXError),
    ("pop", nx.NetworkXError),
    # Non-value-changing insert should fail.
    ("insert", x, 0, True),
    ("insert", x, 0, False, False),
    ("min", (x, 0)),
    ("insert", x, 0, True, False),
    ("min", (x, 0)),
    # Failed insert should not create duplicate keys.
    ("pop", (x, 0)),
    ("pop", nx.NetworkXError),
    # Increase-insert should succeed when allowed.
    ("insert", None, 0, True),
    ("insert", 2, -1, True),
    ("min", (2, -1)),
    ("insert", 2, 1, True, False),
    ("min", (None, 0)),
    # Increase-insert should fail when disallowed.
    ("insert", None, 2, False, False),
    ("min", (None, 0)),
    # Failed increase-insert should not create duplicate keys.
    ("pop", (None, 0)),
    ("pop", (2, 1)),
    ("min", nx.NetworkXError),
    ("pop", nx.NetworkXError),
]
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def _test_heap_class(cls, *args, **kwargs):
    """Run the scripted ``data`` operations plus a bulk stress test on *cls*."""
    heap = cls(*args, **kwargs)
    # Basic behavioral test
    for op in data:
        if op[-1] is not nx.NetworkXError:
            # Last tuple item is the expected return value.
            assert op[-1] == getattr(heap, op[0])(*op[1:-1])
        else:
            # Last tuple item names the exception the call must raise.
            pytest.raises(op[-1], getattr(heap, op[0]), *op[1:-1])
    # Coverage test.
    for i in range(99, -1, -1):
        assert heap.insert(i, i)
    for i in range(50):
        assert heap.pop() == (i, i)
    for i in range(100):
        # Popped keys re-insert as new (True); remaining keys report False.
        assert heap.insert(i, i) == (i < 50)
    for i in range(100):
        # Value increase without allow_increase is refused.
        assert not heap.insert(i, i + 1)
    for i in range(50):
        assert heap.pop() == (i, i)
    for i in range(100):
        assert heap.insert(i, i + 1) == (i < 50)
    for i in range(49):
        assert heap.pop() == (i, i + 1)
    # Keys 49 and 50 both carry value 50; their pop order is unspecified.
    assert sorted([heap.pop(), heap.pop()]) == [(49, 50), (50, 50)]
    for i in range(51, 100):
        assert not heap.insert(i, i + 1, True)
    for i in range(51, 70):
        assert heap.pop() == (i, i + 1)
    for i in range(100):
        assert heap.insert(i, i)
    for i in range(100):
        assert heap.pop() == (i, i)
    # Fully drained heap raises again.
    pytest.raises(nx.NetworkXError, heap.pop)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def test_PairingHeap():
    """PairingHeap satisfies the shared heap behavioral suite."""
    _test_heap_class(PairingHeap)
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def test_BinaryHeap():
    """BinaryHeap satisfies the shared heap behavioral suite."""
    _test_heap_class(BinaryHeap)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/tests/test_random_sequence.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
|
| 3 |
+
from networkx.utils import (
|
| 4 |
+
powerlaw_sequence,
|
| 5 |
+
random_weighted_sample,
|
| 6 |
+
weighted_choice,
|
| 7 |
+
zipf_rv,
|
| 8 |
+
)
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def test_degree_sequences():
    """powerlaw_sequence runs seeded and unseeded and returns the requested length."""
    powerlaw_sequence(10, seed=1)  # seeded smoke test
    seq = powerlaw_sequence(10)
    assert len(seq) == 10
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def test_zipf_rv():
    """zipf_rv returns an int and validates alpha/xmin.

    Fixes an always-true assertion: ``assert type(r), int`` asserted only the
    truthiness of ``type(r)`` (always true), with ``int`` as the message.
    """
    r = zipf_rv(2.3, xmin=2, seed=1)
    r = zipf_rv(2.3, 2, 1)
    r = zipf_rv(2.3)
    assert isinstance(r, int)
    # alpha must exceed 1 and xmin must be at least 1.
    pytest.raises(ValueError, zipf_rv, 0.5)
    pytest.raises(ValueError, zipf_rv, 2, xmin=0)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def test_random_weighted_sample():
    """Sampling as many items as there are keys returns every key."""
    weights = {"a": 10, "b": 20}
    random_weighted_sample(weights, 2, seed=1)  # seeded smoke test
    sample = random_weighted_sample(weights, 2)
    assert sorted(sample) == sorted(weights)
    # Requesting more items than exist is an error.
    pytest.raises(ValueError, random_weighted_sample, weights, 3)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def test_random_weighted_choice():
    """A zero-weight key is never chosen."""
    weights = {"a": 10, "b": 0}
    weighted_choice(weights, seed=1)  # seeded smoke test
    assert weighted_choice(weights) == "a"
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/networkx/utils/tests/test_unionfind.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import networkx as nx
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def test_unionfind():
|
| 5 |
+
# Fixed by: 2cddd5958689bdecdcd89b91ac9aaf6ce0e4f6b8
|
| 6 |
+
# Previously (in 2.x), the UnionFind class could handle mixed types.
|
| 7 |
+
# But in Python 3.x, this causes a TypeError such as:
|
| 8 |
+
# TypeError: unorderable types: str() > int()
|
| 9 |
+
#
|
| 10 |
+
# Now we just make sure that no exception is raised.
|
| 11 |
+
x = nx.utils.UnionFind()
|
| 12 |
+
x.union(0, "a")
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def test_subtree_union():
|
| 16 |
+
# See https://github.com/networkx/networkx/pull/3224
|
| 17 |
+
# (35db1b551ee65780794a357794f521d8768d5049).
|
| 18 |
+
# Test if subtree unions hare handled correctly by to_sets().
|
| 19 |
+
uf = nx.utils.UnionFind()
|
| 20 |
+
uf.union(1, 2)
|
| 21 |
+
uf.union(3, 4)
|
| 22 |
+
uf.union(4, 5)
|
| 23 |
+
uf.union(1, 5)
|
| 24 |
+
assert list(uf.to_sets()) == [{1, 2, 3, 4, 5}]
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def test_unionfind_weights():
|
| 28 |
+
# Tests if weights are computed correctly with unions of many elements
|
| 29 |
+
uf = nx.utils.UnionFind()
|
| 30 |
+
uf.union(1, 4, 7)
|
| 31 |
+
uf.union(2, 5, 8)
|
| 32 |
+
uf.union(3, 6, 9)
|
| 33 |
+
uf.union(1, 2, 3, 4, 5, 6, 7, 8, 9)
|
| 34 |
+
assert uf.weights[uf[1]] == 9
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def test_unbalanced_merge_weights():
|
| 38 |
+
# Tests if the largest set's root is used as the new root when merging
|
| 39 |
+
uf = nx.utils.UnionFind()
|
| 40 |
+
uf.union(1, 2, 3)
|
| 41 |
+
uf.union(4, 5, 6, 7, 8, 9)
|
| 42 |
+
assert uf.weights[uf[1]] == 3
|
| 43 |
+
assert uf.weights[uf[4]] == 6
|
| 44 |
+
largest_root = uf[4]
|
| 45 |
+
uf.union(1, 4)
|
| 46 |
+
assert uf[1] == largest_root
|
| 47 |
+
assert uf.weights[largest_root] == 9
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def test_empty_union():
|
| 51 |
+
# Tests if a null-union does nothing.
|
| 52 |
+
uf = nx.utils.UnionFind((0, 1))
|
| 53 |
+
uf.union()
|
| 54 |
+
assert uf[0] == 0
|
| 55 |
+
assert uf[1] == 1
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc
ADDED
|
Binary file (14.4 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc
ADDED
|
Binary file (19.8 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc
ADDED
|
Binary file (5.81 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc
ADDED
|
Binary file (11.4 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/cmdoptions.py
ADDED
|
@@ -0,0 +1,1075 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
shared options and groups
|
| 3 |
+
|
| 4 |
+
The principle here is to define options once, but *not* instantiate them
|
| 5 |
+
globally. One reason being that options with action='append' can carry state
|
| 6 |
+
between parses. pip parses general options twice internally, and shouldn't
|
| 7 |
+
pass on state. To be consistent, all options will follow this design.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
# The following comment should be removed at some point in the future.
|
| 11 |
+
# mypy: strict-optional=False
|
| 12 |
+
|
| 13 |
+
import importlib.util
|
| 14 |
+
import logging
|
| 15 |
+
import os
|
| 16 |
+
import textwrap
|
| 17 |
+
from functools import partial
|
| 18 |
+
from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
|
| 19 |
+
from textwrap import dedent
|
| 20 |
+
from typing import Any, Callable, Dict, Optional, Tuple
|
| 21 |
+
|
| 22 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 23 |
+
|
| 24 |
+
from pip._internal.cli.parser import ConfigOptionParser
|
| 25 |
+
from pip._internal.exceptions import CommandError
|
| 26 |
+
from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
|
| 27 |
+
from pip._internal.models.format_control import FormatControl
|
| 28 |
+
from pip._internal.models.index import PyPI
|
| 29 |
+
from pip._internal.models.target_python import TargetPython
|
| 30 |
+
from pip._internal.utils.hashes import STRONG_HASHES
|
| 31 |
+
from pip._internal.utils.misc import strtobool
|
| 32 |
+
|
| 33 |
+
logger = logging.getLogger(__name__)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
|
| 37 |
+
"""
|
| 38 |
+
Raise an option parsing error using parser.error().
|
| 39 |
+
|
| 40 |
+
Args:
|
| 41 |
+
parser: an OptionParser instance.
|
| 42 |
+
option: an Option instance.
|
| 43 |
+
msg: the error text.
|
| 44 |
+
"""
|
| 45 |
+
msg = f"{option} error: {msg}"
|
| 46 |
+
msg = textwrap.fill(" ".join(msg.split()))
|
| 47 |
+
parser.error(msg)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
|
| 51 |
+
"""
|
| 52 |
+
Return an OptionGroup object
|
| 53 |
+
group -- assumed to be dict with 'name' and 'options' keys
|
| 54 |
+
parser -- an optparse Parser
|
| 55 |
+
"""
|
| 56 |
+
option_group = OptionGroup(parser, group["name"])
|
| 57 |
+
for option in group["options"]:
|
| 58 |
+
option_group.add_option(option())
|
| 59 |
+
return option_group
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def check_dist_restriction(options: Values, check_target: bool = False) -> None:
|
| 63 |
+
"""Function for determining if custom platform options are allowed.
|
| 64 |
+
|
| 65 |
+
:param options: The OptionParser options.
|
| 66 |
+
:param check_target: Whether or not to check if --target is being used.
|
| 67 |
+
"""
|
| 68 |
+
dist_restriction_set = any(
|
| 69 |
+
[
|
| 70 |
+
options.python_version,
|
| 71 |
+
options.platforms,
|
| 72 |
+
options.abis,
|
| 73 |
+
options.implementation,
|
| 74 |
+
]
|
| 75 |
+
)
|
| 76 |
+
|
| 77 |
+
binary_only = FormatControl(set(), {":all:"})
|
| 78 |
+
sdist_dependencies_allowed = (
|
| 79 |
+
options.format_control != binary_only and not options.ignore_dependencies
|
| 80 |
+
)
|
| 81 |
+
|
| 82 |
+
# Installations or downloads using dist restrictions must not combine
|
| 83 |
+
# source distributions and dist-specific wheels, as they are not
|
| 84 |
+
# guaranteed to be locally compatible.
|
| 85 |
+
if dist_restriction_set and sdist_dependencies_allowed:
|
| 86 |
+
raise CommandError(
|
| 87 |
+
"When restricting platform and interpreter constraints using "
|
| 88 |
+
"--python-version, --platform, --abi, or --implementation, "
|
| 89 |
+
"either --no-deps must be set, or --only-binary=:all: must be "
|
| 90 |
+
"set and --no-binary must not be set (or must be set to "
|
| 91 |
+
":none:)."
|
| 92 |
+
)
|
| 93 |
+
|
| 94 |
+
if check_target:
|
| 95 |
+
if not options.dry_run and dist_restriction_set and not options.target_dir:
|
| 96 |
+
raise CommandError(
|
| 97 |
+
"Can not use any platform or abi specific options unless "
|
| 98 |
+
"installing via '--target' or using '--dry-run'"
|
| 99 |
+
)
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def _path_option_check(option: Option, opt: str, value: str) -> str:
|
| 103 |
+
return os.path.expanduser(value)
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def _package_name_option_check(option: Option, opt: str, value: str) -> str:
|
| 107 |
+
return canonicalize_name(value)
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
class PipOption(Option):
|
| 111 |
+
TYPES = Option.TYPES + ("path", "package_name")
|
| 112 |
+
TYPE_CHECKER = Option.TYPE_CHECKER.copy()
|
| 113 |
+
TYPE_CHECKER["package_name"] = _package_name_option_check
|
| 114 |
+
TYPE_CHECKER["path"] = _path_option_check
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
###########
|
| 118 |
+
# options #
|
| 119 |
+
###########
|
| 120 |
+
|
| 121 |
+
help_: Callable[..., Option] = partial(
|
| 122 |
+
Option,
|
| 123 |
+
"-h",
|
| 124 |
+
"--help",
|
| 125 |
+
dest="help",
|
| 126 |
+
action="help",
|
| 127 |
+
help="Show help.",
|
| 128 |
+
)
|
| 129 |
+
|
| 130 |
+
debug_mode: Callable[..., Option] = partial(
|
| 131 |
+
Option,
|
| 132 |
+
"--debug",
|
| 133 |
+
dest="debug_mode",
|
| 134 |
+
action="store_true",
|
| 135 |
+
default=False,
|
| 136 |
+
help=(
|
| 137 |
+
"Let unhandled exceptions propagate outside the main subroutine, "
|
| 138 |
+
"instead of logging them to stderr."
|
| 139 |
+
),
|
| 140 |
+
)
|
| 141 |
+
|
| 142 |
+
isolated_mode: Callable[..., Option] = partial(
|
| 143 |
+
Option,
|
| 144 |
+
"--isolated",
|
| 145 |
+
dest="isolated_mode",
|
| 146 |
+
action="store_true",
|
| 147 |
+
default=False,
|
| 148 |
+
help=(
|
| 149 |
+
"Run pip in an isolated mode, ignoring environment variables and user "
|
| 150 |
+
"configuration."
|
| 151 |
+
),
|
| 152 |
+
)
|
| 153 |
+
|
| 154 |
+
require_virtualenv: Callable[..., Option] = partial(
|
| 155 |
+
Option,
|
| 156 |
+
"--require-virtualenv",
|
| 157 |
+
"--require-venv",
|
| 158 |
+
dest="require_venv",
|
| 159 |
+
action="store_true",
|
| 160 |
+
default=False,
|
| 161 |
+
help=(
|
| 162 |
+
"Allow pip to only run in a virtual environment; "
|
| 163 |
+
"exit with an error otherwise."
|
| 164 |
+
),
|
| 165 |
+
)
|
| 166 |
+
|
| 167 |
+
override_externally_managed: Callable[..., Option] = partial(
|
| 168 |
+
Option,
|
| 169 |
+
"--break-system-packages",
|
| 170 |
+
dest="override_externally_managed",
|
| 171 |
+
action="store_true",
|
| 172 |
+
help="Allow pip to modify an EXTERNALLY-MANAGED Python installation",
|
| 173 |
+
)
|
| 174 |
+
|
| 175 |
+
python: Callable[..., Option] = partial(
|
| 176 |
+
Option,
|
| 177 |
+
"--python",
|
| 178 |
+
dest="python",
|
| 179 |
+
help="Run pip with the specified Python interpreter.",
|
| 180 |
+
)
|
| 181 |
+
|
| 182 |
+
verbose: Callable[..., Option] = partial(
|
| 183 |
+
Option,
|
| 184 |
+
"-v",
|
| 185 |
+
"--verbose",
|
| 186 |
+
dest="verbose",
|
| 187 |
+
action="count",
|
| 188 |
+
default=0,
|
| 189 |
+
help="Give more output. Option is additive, and can be used up to 3 times.",
|
| 190 |
+
)
|
| 191 |
+
|
| 192 |
+
no_color: Callable[..., Option] = partial(
|
| 193 |
+
Option,
|
| 194 |
+
"--no-color",
|
| 195 |
+
dest="no_color",
|
| 196 |
+
action="store_true",
|
| 197 |
+
default=False,
|
| 198 |
+
help="Suppress colored output.",
|
| 199 |
+
)
|
| 200 |
+
|
| 201 |
+
version: Callable[..., Option] = partial(
|
| 202 |
+
Option,
|
| 203 |
+
"-V",
|
| 204 |
+
"--version",
|
| 205 |
+
dest="version",
|
| 206 |
+
action="store_true",
|
| 207 |
+
help="Show version and exit.",
|
| 208 |
+
)
|
| 209 |
+
|
| 210 |
+
quiet: Callable[..., Option] = partial(
|
| 211 |
+
Option,
|
| 212 |
+
"-q",
|
| 213 |
+
"--quiet",
|
| 214 |
+
dest="quiet",
|
| 215 |
+
action="count",
|
| 216 |
+
default=0,
|
| 217 |
+
help=(
|
| 218 |
+
"Give less output. Option is additive, and can be used up to 3"
|
| 219 |
+
" times (corresponding to WARNING, ERROR, and CRITICAL logging"
|
| 220 |
+
" levels)."
|
| 221 |
+
),
|
| 222 |
+
)
|
| 223 |
+
|
| 224 |
+
progress_bar: Callable[..., Option] = partial(
|
| 225 |
+
Option,
|
| 226 |
+
"--progress-bar",
|
| 227 |
+
dest="progress_bar",
|
| 228 |
+
type="choice",
|
| 229 |
+
choices=["on", "off", "raw"],
|
| 230 |
+
default="on",
|
| 231 |
+
help="Specify whether the progress bar should be used [on, off, raw] (default: on)",
|
| 232 |
+
)
|
| 233 |
+
|
| 234 |
+
log: Callable[..., Option] = partial(
|
| 235 |
+
PipOption,
|
| 236 |
+
"--log",
|
| 237 |
+
"--log-file",
|
| 238 |
+
"--local-log",
|
| 239 |
+
dest="log",
|
| 240 |
+
metavar="path",
|
| 241 |
+
type="path",
|
| 242 |
+
help="Path to a verbose appending log.",
|
| 243 |
+
)
|
| 244 |
+
|
| 245 |
+
no_input: Callable[..., Option] = partial(
|
| 246 |
+
Option,
|
| 247 |
+
# Don't ask for input
|
| 248 |
+
"--no-input",
|
| 249 |
+
dest="no_input",
|
| 250 |
+
action="store_true",
|
| 251 |
+
default=False,
|
| 252 |
+
help="Disable prompting for input.",
|
| 253 |
+
)
|
| 254 |
+
|
| 255 |
+
keyring_provider: Callable[..., Option] = partial(
|
| 256 |
+
Option,
|
| 257 |
+
"--keyring-provider",
|
| 258 |
+
dest="keyring_provider",
|
| 259 |
+
choices=["auto", "disabled", "import", "subprocess"],
|
| 260 |
+
default="auto",
|
| 261 |
+
help=(
|
| 262 |
+
"Enable the credential lookup via the keyring library if user input is allowed."
|
| 263 |
+
" Specify which mechanism to use [disabled, import, subprocess]."
|
| 264 |
+
" (default: disabled)"
|
| 265 |
+
),
|
| 266 |
+
)
|
| 267 |
+
|
| 268 |
+
proxy: Callable[..., Option] = partial(
|
| 269 |
+
Option,
|
| 270 |
+
"--proxy",
|
| 271 |
+
dest="proxy",
|
| 272 |
+
type="str",
|
| 273 |
+
default="",
|
| 274 |
+
help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.",
|
| 275 |
+
)
|
| 276 |
+
|
| 277 |
+
retries: Callable[..., Option] = partial(
|
| 278 |
+
Option,
|
| 279 |
+
"--retries",
|
| 280 |
+
dest="retries",
|
| 281 |
+
type="int",
|
| 282 |
+
default=5,
|
| 283 |
+
help="Maximum number of retries each connection should attempt "
|
| 284 |
+
"(default %default times).",
|
| 285 |
+
)
|
| 286 |
+
|
| 287 |
+
timeout: Callable[..., Option] = partial(
|
| 288 |
+
Option,
|
| 289 |
+
"--timeout",
|
| 290 |
+
"--default-timeout",
|
| 291 |
+
metavar="sec",
|
| 292 |
+
dest="timeout",
|
| 293 |
+
type="float",
|
| 294 |
+
default=15,
|
| 295 |
+
help="Set the socket timeout (default %default seconds).",
|
| 296 |
+
)
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
def exists_action() -> Option:
|
| 300 |
+
return Option(
|
| 301 |
+
# Option when path already exist
|
| 302 |
+
"--exists-action",
|
| 303 |
+
dest="exists_action",
|
| 304 |
+
type="choice",
|
| 305 |
+
choices=["s", "i", "w", "b", "a"],
|
| 306 |
+
default=[],
|
| 307 |
+
action="append",
|
| 308 |
+
metavar="action",
|
| 309 |
+
help="Default action when a path already exists: "
|
| 310 |
+
"(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
|
| 311 |
+
)
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
cert: Callable[..., Option] = partial(
|
| 315 |
+
PipOption,
|
| 316 |
+
"--cert",
|
| 317 |
+
dest="cert",
|
| 318 |
+
type="path",
|
| 319 |
+
metavar="path",
|
| 320 |
+
help=(
|
| 321 |
+
"Path to PEM-encoded CA certificate bundle. "
|
| 322 |
+
"If provided, overrides the default. "
|
| 323 |
+
"See 'SSL Certificate Verification' in pip documentation "
|
| 324 |
+
"for more information."
|
| 325 |
+
),
|
| 326 |
+
)
|
| 327 |
+
|
| 328 |
+
client_cert: Callable[..., Option] = partial(
|
| 329 |
+
PipOption,
|
| 330 |
+
"--client-cert",
|
| 331 |
+
dest="client_cert",
|
| 332 |
+
type="path",
|
| 333 |
+
default=None,
|
| 334 |
+
metavar="path",
|
| 335 |
+
help="Path to SSL client certificate, a single file containing the "
|
| 336 |
+
"private key and the certificate in PEM format.",
|
| 337 |
+
)
|
| 338 |
+
|
| 339 |
+
index_url: Callable[..., Option] = partial(
|
| 340 |
+
Option,
|
| 341 |
+
"-i",
|
| 342 |
+
"--index-url",
|
| 343 |
+
"--pypi-url",
|
| 344 |
+
dest="index_url",
|
| 345 |
+
metavar="URL",
|
| 346 |
+
default=PyPI.simple_url,
|
| 347 |
+
help="Base URL of the Python Package Index (default %default). "
|
| 348 |
+
"This should point to a repository compliant with PEP 503 "
|
| 349 |
+
"(the simple repository API) or a local directory laid out "
|
| 350 |
+
"in the same format.",
|
| 351 |
+
)
|
| 352 |
+
|
| 353 |
+
|
| 354 |
+
def extra_index_url() -> Option:
|
| 355 |
+
return Option(
|
| 356 |
+
"--extra-index-url",
|
| 357 |
+
dest="extra_index_urls",
|
| 358 |
+
metavar="URL",
|
| 359 |
+
action="append",
|
| 360 |
+
default=[],
|
| 361 |
+
help="Extra URLs of package indexes to use in addition to "
|
| 362 |
+
"--index-url. Should follow the same rules as "
|
| 363 |
+
"--index-url.",
|
| 364 |
+
)
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
no_index: Callable[..., Option] = partial(
|
| 368 |
+
Option,
|
| 369 |
+
"--no-index",
|
| 370 |
+
dest="no_index",
|
| 371 |
+
action="store_true",
|
| 372 |
+
default=False,
|
| 373 |
+
help="Ignore package index (only looking at --find-links URLs instead).",
|
| 374 |
+
)
|
| 375 |
+
|
| 376 |
+
|
| 377 |
+
def find_links() -> Option:
|
| 378 |
+
return Option(
|
| 379 |
+
"-f",
|
| 380 |
+
"--find-links",
|
| 381 |
+
dest="find_links",
|
| 382 |
+
action="append",
|
| 383 |
+
default=[],
|
| 384 |
+
metavar="url",
|
| 385 |
+
help="If a URL or path to an html file, then parse for links to "
|
| 386 |
+
"archives such as sdist (.tar.gz) or wheel (.whl) files. "
|
| 387 |
+
"If a local path or file:// URL that's a directory, "
|
| 388 |
+
"then look for archives in the directory listing. "
|
| 389 |
+
"Links to VCS project URLs are not supported.",
|
| 390 |
+
)
|
| 391 |
+
|
| 392 |
+
|
| 393 |
+
def trusted_host() -> Option:
|
| 394 |
+
return Option(
|
| 395 |
+
"--trusted-host",
|
| 396 |
+
dest="trusted_hosts",
|
| 397 |
+
action="append",
|
| 398 |
+
metavar="HOSTNAME",
|
| 399 |
+
default=[],
|
| 400 |
+
help="Mark this host or host:port pair as trusted, even though it "
|
| 401 |
+
"does not have valid or any HTTPS.",
|
| 402 |
+
)
|
| 403 |
+
|
| 404 |
+
|
| 405 |
+
def constraints() -> Option:
|
| 406 |
+
return Option(
|
| 407 |
+
"-c",
|
| 408 |
+
"--constraint",
|
| 409 |
+
dest="constraints",
|
| 410 |
+
action="append",
|
| 411 |
+
default=[],
|
| 412 |
+
metavar="file",
|
| 413 |
+
help="Constrain versions using the given constraints file. "
|
| 414 |
+
"This option can be used multiple times.",
|
| 415 |
+
)
|
| 416 |
+
|
| 417 |
+
|
| 418 |
+
def requirements() -> Option:
|
| 419 |
+
return Option(
|
| 420 |
+
"-r",
|
| 421 |
+
"--requirement",
|
| 422 |
+
dest="requirements",
|
| 423 |
+
action="append",
|
| 424 |
+
default=[],
|
| 425 |
+
metavar="file",
|
| 426 |
+
help="Install from the given requirements file. "
|
| 427 |
+
"This option can be used multiple times.",
|
| 428 |
+
)
|
| 429 |
+
|
| 430 |
+
|
| 431 |
+
def editable() -> Option:
|
| 432 |
+
return Option(
|
| 433 |
+
"-e",
|
| 434 |
+
"--editable",
|
| 435 |
+
dest="editables",
|
| 436 |
+
action="append",
|
| 437 |
+
default=[],
|
| 438 |
+
metavar="path/url",
|
| 439 |
+
help=(
|
| 440 |
+
"Install a project in editable mode (i.e. setuptools "
|
| 441 |
+
'"develop mode") from a local project path or a VCS url.'
|
| 442 |
+
),
|
| 443 |
+
)
|
| 444 |
+
|
| 445 |
+
|
| 446 |
+
def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None:
|
| 447 |
+
value = os.path.abspath(value)
|
| 448 |
+
setattr(parser.values, option.dest, value)
|
| 449 |
+
|
| 450 |
+
|
| 451 |
+
src: Callable[..., Option] = partial(
|
| 452 |
+
PipOption,
|
| 453 |
+
"--src",
|
| 454 |
+
"--source",
|
| 455 |
+
"--source-dir",
|
| 456 |
+
"--source-directory",
|
| 457 |
+
dest="src_dir",
|
| 458 |
+
type="path",
|
| 459 |
+
metavar="dir",
|
| 460 |
+
default=get_src_prefix(),
|
| 461 |
+
action="callback",
|
| 462 |
+
callback=_handle_src,
|
| 463 |
+
help="Directory to check out editable projects into. "
|
| 464 |
+
'The default in a virtualenv is "<venv path>/src". '
|
| 465 |
+
'The default for global installs is "<current dir>/src".',
|
| 466 |
+
)
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
def _get_format_control(values: Values, option: Option) -> Any:
|
| 470 |
+
"""Get a format_control object."""
|
| 471 |
+
return getattr(values, option.dest)
|
| 472 |
+
|
| 473 |
+
|
| 474 |
+
def _handle_no_binary(
|
| 475 |
+
option: Option, opt_str: str, value: str, parser: OptionParser
|
| 476 |
+
) -> None:
|
| 477 |
+
existing = _get_format_control(parser.values, option)
|
| 478 |
+
FormatControl.handle_mutual_excludes(
|
| 479 |
+
value,
|
| 480 |
+
existing.no_binary,
|
| 481 |
+
existing.only_binary,
|
| 482 |
+
)
|
| 483 |
+
|
| 484 |
+
|
| 485 |
+
def _handle_only_binary(
|
| 486 |
+
option: Option, opt_str: str, value: str, parser: OptionParser
|
| 487 |
+
) -> None:
|
| 488 |
+
existing = _get_format_control(parser.values, option)
|
| 489 |
+
FormatControl.handle_mutual_excludes(
|
| 490 |
+
value,
|
| 491 |
+
existing.only_binary,
|
| 492 |
+
existing.no_binary,
|
| 493 |
+
)
|
| 494 |
+
|
| 495 |
+
|
| 496 |
+
def no_binary() -> Option:
|
| 497 |
+
format_control = FormatControl(set(), set())
|
| 498 |
+
return Option(
|
| 499 |
+
"--no-binary",
|
| 500 |
+
dest="format_control",
|
| 501 |
+
action="callback",
|
| 502 |
+
callback=_handle_no_binary,
|
| 503 |
+
type="str",
|
| 504 |
+
default=format_control,
|
| 505 |
+
help="Do not use binary packages. Can be supplied multiple times, and "
|
| 506 |
+
'each time adds to the existing value. Accepts either ":all:" to '
|
| 507 |
+
'disable all binary packages, ":none:" to empty the set (notice '
|
| 508 |
+
"the colons), or one or more package names with commas between "
|
| 509 |
+
"them (no colons). Note that some packages are tricky to compile "
|
| 510 |
+
"and may fail to install when this option is used on them.",
|
| 511 |
+
)
|
| 512 |
+
|
| 513 |
+
|
| 514 |
+
def only_binary() -> Option:
|
| 515 |
+
format_control = FormatControl(set(), set())
|
| 516 |
+
return Option(
|
| 517 |
+
"--only-binary",
|
| 518 |
+
dest="format_control",
|
| 519 |
+
action="callback",
|
| 520 |
+
callback=_handle_only_binary,
|
| 521 |
+
type="str",
|
| 522 |
+
default=format_control,
|
| 523 |
+
help="Do not use source packages. Can be supplied multiple times, and "
|
| 524 |
+
'each time adds to the existing value. Accepts either ":all:" to '
|
| 525 |
+
'disable all source packages, ":none:" to empty the set, or one '
|
| 526 |
+
"or more package names with commas between them. Packages "
|
| 527 |
+
"without binary distributions will fail to install when this "
|
| 528 |
+
"option is used on them.",
|
| 529 |
+
)
|
| 530 |
+
|
| 531 |
+
|
| 532 |
+
platforms: Callable[..., Option] = partial(
|
| 533 |
+
Option,
|
| 534 |
+
"--platform",
|
| 535 |
+
dest="platforms",
|
| 536 |
+
metavar="platform",
|
| 537 |
+
action="append",
|
| 538 |
+
default=None,
|
| 539 |
+
help=(
|
| 540 |
+
"Only use wheels compatible with <platform>. Defaults to the "
|
| 541 |
+
"platform of the running system. Use this option multiple times to "
|
| 542 |
+
"specify multiple platforms supported by the target interpreter."
|
| 543 |
+
),
|
| 544 |
+
)
|
| 545 |
+
|
| 546 |
+
|
| 547 |
+
# This was made a separate function for unit-testing purposes.
|
| 548 |
+
def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
|
| 549 |
+
"""
|
| 550 |
+
Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
|
| 551 |
+
|
| 552 |
+
:return: A 2-tuple (version_info, error_msg), where `error_msg` is
|
| 553 |
+
non-None if and only if there was a parsing error.
|
| 554 |
+
"""
|
| 555 |
+
if not value:
|
| 556 |
+
# The empty string is the same as not providing a value.
|
| 557 |
+
return (None, None)
|
| 558 |
+
|
| 559 |
+
parts = value.split(".")
|
| 560 |
+
if len(parts) > 3:
|
| 561 |
+
return ((), "at most three version parts are allowed")
|
| 562 |
+
|
| 563 |
+
if len(parts) == 1:
|
| 564 |
+
# Then we are in the case of "3" or "37".
|
| 565 |
+
value = parts[0]
|
| 566 |
+
if len(value) > 1:
|
| 567 |
+
parts = [value[0], value[1:]]
|
| 568 |
+
|
| 569 |
+
try:
|
| 570 |
+
version_info = tuple(int(part) for part in parts)
|
| 571 |
+
except ValueError:
|
| 572 |
+
return ((), "each version part must be an integer")
|
| 573 |
+
|
| 574 |
+
return (version_info, None)
|
| 575 |
+
|
| 576 |
+
|
| 577 |
+
def _handle_python_version(
|
| 578 |
+
option: Option, opt_str: str, value: str, parser: OptionParser
|
| 579 |
+
) -> None:
|
| 580 |
+
"""
|
| 581 |
+
Handle a provided --python-version value.
|
| 582 |
+
"""
|
| 583 |
+
version_info, error_msg = _convert_python_version(value)
|
| 584 |
+
if error_msg is not None:
|
| 585 |
+
msg = f"invalid --python-version value: {value!r}: {error_msg}"
|
| 586 |
+
raise_option_error(parser, option=option, msg=msg)
|
| 587 |
+
|
| 588 |
+
parser.values.python_version = version_info
|
| 589 |
+
|
| 590 |
+
|
| 591 |
+
python_version: Callable[..., Option] = partial(
|
| 592 |
+
Option,
|
| 593 |
+
"--python-version",
|
| 594 |
+
dest="python_version",
|
| 595 |
+
metavar="python_version",
|
| 596 |
+
action="callback",
|
| 597 |
+
callback=_handle_python_version,
|
| 598 |
+
type="str",
|
| 599 |
+
default=None,
|
| 600 |
+
help=dedent(
|
| 601 |
+
"""\
|
| 602 |
+
The Python interpreter version to use for wheel and "Requires-Python"
|
| 603 |
+
compatibility checks. Defaults to a version derived from the running
|
| 604 |
+
interpreter. The version can be specified using up to three dot-separated
|
| 605 |
+
integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
|
| 606 |
+
version can also be given as a string without dots (e.g. "37" for 3.7.0).
|
| 607 |
+
"""
|
| 608 |
+
),
|
| 609 |
+
)
|
| 610 |
+
|
| 611 |
+
|
| 612 |
+
implementation: Callable[..., Option] = partial(
|
| 613 |
+
Option,
|
| 614 |
+
"--implementation",
|
| 615 |
+
dest="implementation",
|
| 616 |
+
metavar="implementation",
|
| 617 |
+
default=None,
|
| 618 |
+
help=(
|
| 619 |
+
"Only use wheels compatible with Python "
|
| 620 |
+
"implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
|
| 621 |
+
" or 'ip'. If not specified, then the current "
|
| 622 |
+
"interpreter implementation is used. Use 'py' to force "
|
| 623 |
+
"implementation-agnostic wheels."
|
| 624 |
+
),
|
| 625 |
+
)
|
| 626 |
+
|
| 627 |
+
|
| 628 |
+
abis: Callable[..., Option] = partial(
|
| 629 |
+
Option,
|
| 630 |
+
"--abi",
|
| 631 |
+
dest="abis",
|
| 632 |
+
metavar="abi",
|
| 633 |
+
action="append",
|
| 634 |
+
default=None,
|
| 635 |
+
help=(
|
| 636 |
+
"Only use wheels compatible with Python abi <abi>, e.g. 'pypy_41'. "
|
| 637 |
+
"If not specified, then the current interpreter abi tag is used. "
|
| 638 |
+
"Use this option multiple times to specify multiple abis supported "
|
| 639 |
+
"by the target interpreter. Generally you will need to specify "
|
| 640 |
+
"--implementation, --platform, and --python-version when using this "
|
| 641 |
+
"option."
|
| 642 |
+
),
|
| 643 |
+
)
|
| 644 |
+
|
| 645 |
+
|
| 646 |
+
def add_target_python_options(cmd_opts: OptionGroup) -> None:
|
| 647 |
+
cmd_opts.add_option(platforms())
|
| 648 |
+
cmd_opts.add_option(python_version())
|
| 649 |
+
cmd_opts.add_option(implementation())
|
| 650 |
+
cmd_opts.add_option(abis())
|
| 651 |
+
|
| 652 |
+
|
| 653 |
+
def make_target_python(options: Values) -> TargetPython:
|
| 654 |
+
target_python = TargetPython(
|
| 655 |
+
platforms=options.platforms,
|
| 656 |
+
py_version_info=options.python_version,
|
| 657 |
+
abis=options.abis,
|
| 658 |
+
implementation=options.implementation,
|
| 659 |
+
)
|
| 660 |
+
|
| 661 |
+
return target_python
|
| 662 |
+
|
| 663 |
+
|
| 664 |
+
def prefer_binary() -> Option:
|
| 665 |
+
return Option(
|
| 666 |
+
"--prefer-binary",
|
| 667 |
+
dest="prefer_binary",
|
| 668 |
+
action="store_true",
|
| 669 |
+
default=False,
|
| 670 |
+
help=(
|
| 671 |
+
"Prefer binary packages over source packages, even if the "
|
| 672 |
+
"source packages are newer."
|
| 673 |
+
),
|
| 674 |
+
)
|
| 675 |
+
|
| 676 |
+
|
| 677 |
+
cache_dir: Callable[..., Option] = partial(
|
| 678 |
+
PipOption,
|
| 679 |
+
"--cache-dir",
|
| 680 |
+
dest="cache_dir",
|
| 681 |
+
default=USER_CACHE_DIR,
|
| 682 |
+
metavar="dir",
|
| 683 |
+
type="path",
|
| 684 |
+
help="Store the cache data in <dir>.",
|
| 685 |
+
)
|
| 686 |
+
|
| 687 |
+
|
| 688 |
+
def _handle_no_cache_dir(
|
| 689 |
+
option: Option, opt: str, value: str, parser: OptionParser
|
| 690 |
+
) -> None:
|
| 691 |
+
"""
|
| 692 |
+
Process a value provided for the --no-cache-dir option.
|
| 693 |
+
|
| 694 |
+
This is an optparse.Option callback for the --no-cache-dir option.
|
| 695 |
+
"""
|
| 696 |
+
# The value argument will be None if --no-cache-dir is passed via the
|
| 697 |
+
# command-line, since the option doesn't accept arguments. However,
|
| 698 |
+
# the value can be non-None if the option is triggered e.g. by an
|
| 699 |
+
# environment variable, like PIP_NO_CACHE_DIR=true.
|
| 700 |
+
if value is not None:
|
| 701 |
+
# Then parse the string value to get argument error-checking.
|
| 702 |
+
try:
|
| 703 |
+
strtobool(value)
|
| 704 |
+
except ValueError as exc:
|
| 705 |
+
raise_option_error(parser, option=option, msg=str(exc))
|
| 706 |
+
|
| 707 |
+
# Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
|
| 708 |
+
# converted to 0 (like "false" or "no") caused cache_dir to be disabled
|
| 709 |
+
# rather than enabled (logic would say the latter). Thus, we disable
|
| 710 |
+
# the cache directory not just on values that parse to True, but (for
|
| 711 |
+
# backwards compatibility reasons) also on values that parse to False.
|
| 712 |
+
# In other words, always set it to False if the option is provided in
|
| 713 |
+
# some (valid) form.
|
| 714 |
+
parser.values.cache_dir = False
|
| 715 |
+
|
| 716 |
+
|
| 717 |
+
no_cache: Callable[..., Option] = partial(
|
| 718 |
+
Option,
|
| 719 |
+
"--no-cache-dir",
|
| 720 |
+
dest="cache_dir",
|
| 721 |
+
action="callback",
|
| 722 |
+
callback=_handle_no_cache_dir,
|
| 723 |
+
help="Disable the cache.",
|
| 724 |
+
)
|
| 725 |
+
|
| 726 |
+
no_deps: Callable[..., Option] = partial(
|
| 727 |
+
Option,
|
| 728 |
+
"--no-deps",
|
| 729 |
+
"--no-dependencies",
|
| 730 |
+
dest="ignore_dependencies",
|
| 731 |
+
action="store_true",
|
| 732 |
+
default=False,
|
| 733 |
+
help="Don't install package dependencies.",
|
| 734 |
+
)
|
| 735 |
+
|
| 736 |
+
ignore_requires_python: Callable[..., Option] = partial(
|
| 737 |
+
Option,
|
| 738 |
+
"--ignore-requires-python",
|
| 739 |
+
dest="ignore_requires_python",
|
| 740 |
+
action="store_true",
|
| 741 |
+
help="Ignore the Requires-Python information.",
|
| 742 |
+
)
|
| 743 |
+
|
| 744 |
+
no_build_isolation: Callable[..., Option] = partial(
|
| 745 |
+
Option,
|
| 746 |
+
"--no-build-isolation",
|
| 747 |
+
dest="build_isolation",
|
| 748 |
+
action="store_false",
|
| 749 |
+
default=True,
|
| 750 |
+
help="Disable isolation when building a modern source distribution. "
|
| 751 |
+
"Build dependencies specified by PEP 518 must be already installed "
|
| 752 |
+
"if this option is used.",
|
| 753 |
+
)
|
| 754 |
+
|
| 755 |
+
check_build_deps: Callable[..., Option] = partial(
|
| 756 |
+
Option,
|
| 757 |
+
"--check-build-dependencies",
|
| 758 |
+
dest="check_build_deps",
|
| 759 |
+
action="store_true",
|
| 760 |
+
default=False,
|
| 761 |
+
help="Check the build dependencies when PEP517 is used.",
|
| 762 |
+
)
|
| 763 |
+
|
| 764 |
+
|
| 765 |
+
def _handle_no_use_pep517(
|
| 766 |
+
option: Option, opt: str, value: str, parser: OptionParser
|
| 767 |
+
) -> None:
|
| 768 |
+
"""
|
| 769 |
+
Process a value provided for the --no-use-pep517 option.
|
| 770 |
+
|
| 771 |
+
This is an optparse.Option callback for the no_use_pep517 option.
|
| 772 |
+
"""
|
| 773 |
+
# Since --no-use-pep517 doesn't accept arguments, the value argument
|
| 774 |
+
# will be None if --no-use-pep517 is passed via the command-line.
|
| 775 |
+
# However, the value can be non-None if the option is triggered e.g.
|
| 776 |
+
# by an environment variable, for example "PIP_NO_USE_PEP517=true".
|
| 777 |
+
if value is not None:
|
| 778 |
+
msg = """A value was passed for --no-use-pep517,
|
| 779 |
+
probably using either the PIP_NO_USE_PEP517 environment variable
|
| 780 |
+
or the "no-use-pep517" config file option. Use an appropriate value
|
| 781 |
+
of the PIP_USE_PEP517 environment variable or the "use-pep517"
|
| 782 |
+
config file option instead.
|
| 783 |
+
"""
|
| 784 |
+
raise_option_error(parser, option=option, msg=msg)
|
| 785 |
+
|
| 786 |
+
# If user doesn't wish to use pep517, we check if setuptools and wheel are installed
|
| 787 |
+
# and raise error if it is not.
|
| 788 |
+
packages = ("setuptools", "wheel")
|
| 789 |
+
if not all(importlib.util.find_spec(package) for package in packages):
|
| 790 |
+
msg = (
|
| 791 |
+
f"It is not possible to use --no-use-pep517 "
|
| 792 |
+
f"without {' and '.join(packages)} installed."
|
| 793 |
+
)
|
| 794 |
+
raise_option_error(parser, option=option, msg=msg)
|
| 795 |
+
|
| 796 |
+
# Otherwise, --no-use-pep517 was passed via the command-line.
|
| 797 |
+
parser.values.use_pep517 = False
|
| 798 |
+
|
| 799 |
+
|
| 800 |
+
use_pep517: Any = partial(
|
| 801 |
+
Option,
|
| 802 |
+
"--use-pep517",
|
| 803 |
+
dest="use_pep517",
|
| 804 |
+
action="store_true",
|
| 805 |
+
default=None,
|
| 806 |
+
help="Use PEP 517 for building source distributions "
|
| 807 |
+
"(use --no-use-pep517 to force legacy behaviour).",
|
| 808 |
+
)
|
| 809 |
+
|
| 810 |
+
no_use_pep517: Any = partial(
|
| 811 |
+
Option,
|
| 812 |
+
"--no-use-pep517",
|
| 813 |
+
dest="use_pep517",
|
| 814 |
+
action="callback",
|
| 815 |
+
callback=_handle_no_use_pep517,
|
| 816 |
+
default=None,
|
| 817 |
+
help=SUPPRESS_HELP,
|
| 818 |
+
)
|
| 819 |
+
|
| 820 |
+
|
| 821 |
+
def _handle_config_settings(
|
| 822 |
+
option: Option, opt_str: str, value: str, parser: OptionParser
|
| 823 |
+
) -> None:
|
| 824 |
+
key, sep, val = value.partition("=")
|
| 825 |
+
if sep != "=":
|
| 826 |
+
parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")
|
| 827 |
+
dest = getattr(parser.values, option.dest)
|
| 828 |
+
if dest is None:
|
| 829 |
+
dest = {}
|
| 830 |
+
setattr(parser.values, option.dest, dest)
|
| 831 |
+
if key in dest:
|
| 832 |
+
if isinstance(dest[key], list):
|
| 833 |
+
dest[key].append(val)
|
| 834 |
+
else:
|
| 835 |
+
dest[key] = [dest[key], val]
|
| 836 |
+
else:
|
| 837 |
+
dest[key] = val
|
| 838 |
+
|
| 839 |
+
|
| 840 |
+
config_settings: Callable[..., Option] = partial(
|
| 841 |
+
Option,
|
| 842 |
+
"-C",
|
| 843 |
+
"--config-settings",
|
| 844 |
+
dest="config_settings",
|
| 845 |
+
type=str,
|
| 846 |
+
action="callback",
|
| 847 |
+
callback=_handle_config_settings,
|
| 848 |
+
metavar="settings",
|
| 849 |
+
help="Configuration settings to be passed to the PEP 517 build backend. "
|
| 850 |
+
"Settings take the form KEY=VALUE. Use multiple --config-settings options "
|
| 851 |
+
"to pass multiple keys to the backend.",
|
| 852 |
+
)
|
| 853 |
+
|
| 854 |
+
build_options: Callable[..., Option] = partial(
|
| 855 |
+
Option,
|
| 856 |
+
"--build-option",
|
| 857 |
+
dest="build_options",
|
| 858 |
+
metavar="options",
|
| 859 |
+
action="append",
|
| 860 |
+
help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
|
| 861 |
+
)
|
| 862 |
+
|
| 863 |
+
global_options: Callable[..., Option] = partial(
|
| 864 |
+
Option,
|
| 865 |
+
"--global-option",
|
| 866 |
+
dest="global_options",
|
| 867 |
+
action="append",
|
| 868 |
+
metavar="options",
|
| 869 |
+
help="Extra global options to be supplied to the setup.py "
|
| 870 |
+
"call before the install or bdist_wheel command.",
|
| 871 |
+
)
|
| 872 |
+
|
| 873 |
+
no_clean: Callable[..., Option] = partial(
|
| 874 |
+
Option,
|
| 875 |
+
"--no-clean",
|
| 876 |
+
action="store_true",
|
| 877 |
+
default=False,
|
| 878 |
+
help="Don't clean up build directories.",
|
| 879 |
+
)
|
| 880 |
+
|
| 881 |
+
pre: Callable[..., Option] = partial(
|
| 882 |
+
Option,
|
| 883 |
+
"--pre",
|
| 884 |
+
action="store_true",
|
| 885 |
+
default=False,
|
| 886 |
+
help="Include pre-release and development versions. By default, "
|
| 887 |
+
"pip only finds stable versions.",
|
| 888 |
+
)
|
| 889 |
+
|
| 890 |
+
disable_pip_version_check: Callable[..., Option] = partial(
|
| 891 |
+
Option,
|
| 892 |
+
"--disable-pip-version-check",
|
| 893 |
+
dest="disable_pip_version_check",
|
| 894 |
+
action="store_true",
|
| 895 |
+
default=False,
|
| 896 |
+
help="Don't periodically check PyPI to determine whether a new version "
|
| 897 |
+
"of pip is available for download. Implied with --no-index.",
|
| 898 |
+
)
|
| 899 |
+
|
| 900 |
+
root_user_action: Callable[..., Option] = partial(
|
| 901 |
+
Option,
|
| 902 |
+
"--root-user-action",
|
| 903 |
+
dest="root_user_action",
|
| 904 |
+
default="warn",
|
| 905 |
+
choices=["warn", "ignore"],
|
| 906 |
+
help="Action if pip is run as a root user [warn, ignore] (default: warn)",
|
| 907 |
+
)
|
| 908 |
+
|
| 909 |
+
|
| 910 |
+
def _handle_merge_hash(
|
| 911 |
+
option: Option, opt_str: str, value: str, parser: OptionParser
|
| 912 |
+
) -> None:
|
| 913 |
+
"""Given a value spelled "algo:digest", append the digest to a list
|
| 914 |
+
pointed to in a dict by the algo name."""
|
| 915 |
+
if not parser.values.hashes:
|
| 916 |
+
parser.values.hashes = {}
|
| 917 |
+
try:
|
| 918 |
+
algo, digest = value.split(":", 1)
|
| 919 |
+
except ValueError:
|
| 920 |
+
parser.error(
|
| 921 |
+
f"Arguments to {opt_str} must be a hash name "
|
| 922 |
+
"followed by a value, like --hash=sha256:"
|
| 923 |
+
"abcde..."
|
| 924 |
+
)
|
| 925 |
+
if algo not in STRONG_HASHES:
|
| 926 |
+
parser.error(
|
| 927 |
+
"Allowed hash algorithms for {} are {}.".format(
|
| 928 |
+
opt_str, ", ".join(STRONG_HASHES)
|
| 929 |
+
)
|
| 930 |
+
)
|
| 931 |
+
parser.values.hashes.setdefault(algo, []).append(digest)
|
| 932 |
+
|
| 933 |
+
|
| 934 |
+
hash: Callable[..., Option] = partial(
|
| 935 |
+
Option,
|
| 936 |
+
"--hash",
|
| 937 |
+
# Hash values eventually end up in InstallRequirement.hashes due to
|
| 938 |
+
# __dict__ copying in process_line().
|
| 939 |
+
dest="hashes",
|
| 940 |
+
action="callback",
|
| 941 |
+
callback=_handle_merge_hash,
|
| 942 |
+
type="string",
|
| 943 |
+
help="Verify that the package's archive matches this "
|
| 944 |
+
"hash before installing. Example: --hash=sha256:abcdef...",
|
| 945 |
+
)
|
| 946 |
+
|
| 947 |
+
|
| 948 |
+
require_hashes: Callable[..., Option] = partial(
|
| 949 |
+
Option,
|
| 950 |
+
"--require-hashes",
|
| 951 |
+
dest="require_hashes",
|
| 952 |
+
action="store_true",
|
| 953 |
+
default=False,
|
| 954 |
+
help="Require a hash to check each requirement against, for "
|
| 955 |
+
"repeatable installs. This option is implied when any package in a "
|
| 956 |
+
"requirements file has a --hash option.",
|
| 957 |
+
)
|
| 958 |
+
|
| 959 |
+
|
| 960 |
+
list_path: Callable[..., Option] = partial(
|
| 961 |
+
PipOption,
|
| 962 |
+
"--path",
|
| 963 |
+
dest="path",
|
| 964 |
+
type="path",
|
| 965 |
+
action="append",
|
| 966 |
+
help="Restrict to the specified installation path for listing "
|
| 967 |
+
"packages (can be used multiple times).",
|
| 968 |
+
)
|
| 969 |
+
|
| 970 |
+
|
| 971 |
+
def check_list_path_option(options: Values) -> None:
|
| 972 |
+
if options.path and (options.user or options.local):
|
| 973 |
+
raise CommandError("Cannot combine '--path' with '--user' or '--local'")
|
| 974 |
+
|
| 975 |
+
|
| 976 |
+
list_exclude: Callable[..., Option] = partial(
|
| 977 |
+
PipOption,
|
| 978 |
+
"--exclude",
|
| 979 |
+
dest="excludes",
|
| 980 |
+
action="append",
|
| 981 |
+
metavar="package",
|
| 982 |
+
type="package_name",
|
| 983 |
+
help="Exclude specified package from the output",
|
| 984 |
+
)
|
| 985 |
+
|
| 986 |
+
|
| 987 |
+
no_python_version_warning: Callable[..., Option] = partial(
|
| 988 |
+
Option,
|
| 989 |
+
"--no-python-version-warning",
|
| 990 |
+
dest="no_python_version_warning",
|
| 991 |
+
action="store_true",
|
| 992 |
+
default=False,
|
| 993 |
+
help="Silence deprecation warnings for upcoming unsupported Pythons.",
|
| 994 |
+
)
|
| 995 |
+
|
| 996 |
+
|
| 997 |
+
# Features that are now always on. A warning is printed if they are used.
|
| 998 |
+
ALWAYS_ENABLED_FEATURES = [
|
| 999 |
+
"truststore", # always on since 24.2
|
| 1000 |
+
"no-binary-enable-wheel-cache", # always on since 23.1
|
| 1001 |
+
]
|
| 1002 |
+
|
| 1003 |
+
use_new_feature: Callable[..., Option] = partial(
|
| 1004 |
+
Option,
|
| 1005 |
+
"--use-feature",
|
| 1006 |
+
dest="features_enabled",
|
| 1007 |
+
metavar="feature",
|
| 1008 |
+
action="append",
|
| 1009 |
+
default=[],
|
| 1010 |
+
choices=[
|
| 1011 |
+
"fast-deps",
|
| 1012 |
+
]
|
| 1013 |
+
+ ALWAYS_ENABLED_FEATURES,
|
| 1014 |
+
help="Enable new functionality, that may be backward incompatible.",
|
| 1015 |
+
)
|
| 1016 |
+
|
| 1017 |
+
use_deprecated_feature: Callable[..., Option] = partial(
|
| 1018 |
+
Option,
|
| 1019 |
+
"--use-deprecated",
|
| 1020 |
+
dest="deprecated_features_enabled",
|
| 1021 |
+
metavar="feature",
|
| 1022 |
+
action="append",
|
| 1023 |
+
default=[],
|
| 1024 |
+
choices=[
|
| 1025 |
+
"legacy-resolver",
|
| 1026 |
+
"legacy-certs",
|
| 1027 |
+
],
|
| 1028 |
+
help=("Enable deprecated functionality, that will be removed in the future."),
|
| 1029 |
+
)
|
| 1030 |
+
|
| 1031 |
+
|
| 1032 |
+
##########
|
| 1033 |
+
# groups #
|
| 1034 |
+
##########
|
| 1035 |
+
|
| 1036 |
+
general_group: Dict[str, Any] = {
|
| 1037 |
+
"name": "General Options",
|
| 1038 |
+
"options": [
|
| 1039 |
+
help_,
|
| 1040 |
+
debug_mode,
|
| 1041 |
+
isolated_mode,
|
| 1042 |
+
require_virtualenv,
|
| 1043 |
+
python,
|
| 1044 |
+
verbose,
|
| 1045 |
+
version,
|
| 1046 |
+
quiet,
|
| 1047 |
+
log,
|
| 1048 |
+
no_input,
|
| 1049 |
+
keyring_provider,
|
| 1050 |
+
proxy,
|
| 1051 |
+
retries,
|
| 1052 |
+
timeout,
|
| 1053 |
+
exists_action,
|
| 1054 |
+
trusted_host,
|
| 1055 |
+
cert,
|
| 1056 |
+
client_cert,
|
| 1057 |
+
cache_dir,
|
| 1058 |
+
no_cache,
|
| 1059 |
+
disable_pip_version_check,
|
| 1060 |
+
no_color,
|
| 1061 |
+
no_python_version_warning,
|
| 1062 |
+
use_new_feature,
|
| 1063 |
+
use_deprecated_feature,
|
| 1064 |
+
],
|
| 1065 |
+
}
|
| 1066 |
+
|
| 1067 |
+
index_group: Dict[str, Any] = {
|
| 1068 |
+
"name": "Package Index Options",
|
| 1069 |
+
"options": [
|
| 1070 |
+
index_url,
|
| 1071 |
+
extra_index_url,
|
| 1072 |
+
no_index,
|
| 1073 |
+
find_links,
|
| 1074 |
+
],
|
| 1075 |
+
}
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/main.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Primary application entrypoint.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import locale
|
| 5 |
+
import logging
|
| 6 |
+
import os
|
| 7 |
+
import sys
|
| 8 |
+
import warnings
|
| 9 |
+
from typing import List, Optional
|
| 10 |
+
|
| 11 |
+
from pip._internal.cli.autocompletion import autocomplete
|
| 12 |
+
from pip._internal.cli.main_parser import parse_command
|
| 13 |
+
from pip._internal.commands import create_command
|
| 14 |
+
from pip._internal.exceptions import PipError
|
| 15 |
+
from pip._internal.utils import deprecation
|
| 16 |
+
|
| 17 |
+
logger = logging.getLogger(__name__)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
# Do not import and use main() directly! Using it directly is actively
|
| 21 |
+
# discouraged by pip's maintainers. The name, location and behavior of
|
| 22 |
+
# this function is subject to change, so calling it directly is not
|
| 23 |
+
# portable across different pip versions.
|
| 24 |
+
|
| 25 |
+
# In addition, running pip in-process is unsupported and unsafe. This is
|
| 26 |
+
# elaborated in detail at
|
| 27 |
+
# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
|
| 28 |
+
# That document also provides suggestions that should work for nearly
|
| 29 |
+
# all users that are considering importing and using main() directly.
|
| 30 |
+
|
| 31 |
+
# However, we know that certain users will still want to invoke pip
|
| 32 |
+
# in-process. If you understand and accept the implications of using pip
|
| 33 |
+
# in an unsupported manner, the best approach is to use runpy to avoid
|
| 34 |
+
# depending on the exact location of this entry point.
|
| 35 |
+
|
| 36 |
+
# The following example shows how to use runpy to invoke pip in that
|
| 37 |
+
# case:
|
| 38 |
+
#
|
| 39 |
+
# sys.argv = ["pip", your, args, here]
|
| 40 |
+
# runpy.run_module("pip", run_name="__main__")
|
| 41 |
+
#
|
| 42 |
+
# Note that this will exit the process after running, unlike a direct
|
| 43 |
+
# call to main. As it is not safe to do any processing after calling
|
| 44 |
+
# main, this should not be an issue in practice.
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def main(args: Optional[List[str]] = None) -> int:
|
| 48 |
+
if args is None:
|
| 49 |
+
args = sys.argv[1:]
|
| 50 |
+
|
| 51 |
+
# Suppress the pkg_resources deprecation warning
|
| 52 |
+
# Note - we use a module of .*pkg_resources to cover
|
| 53 |
+
# the normal case (pip._vendor.pkg_resources) and the
|
| 54 |
+
# devendored case (a bare pkg_resources)
|
| 55 |
+
warnings.filterwarnings(
|
| 56 |
+
action="ignore", category=DeprecationWarning, module=".*pkg_resources"
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
# Configure our deprecation warnings to be sent through loggers
|
| 60 |
+
deprecation.install_warning_logger()
|
| 61 |
+
|
| 62 |
+
autocomplete()
|
| 63 |
+
|
| 64 |
+
try:
|
| 65 |
+
cmd_name, cmd_args = parse_command(args)
|
| 66 |
+
except PipError as exc:
|
| 67 |
+
sys.stderr.write(f"ERROR: {exc}")
|
| 68 |
+
sys.stderr.write(os.linesep)
|
| 69 |
+
sys.exit(1)
|
| 70 |
+
|
| 71 |
+
# Needed for locale.getpreferredencoding(False) to work
|
| 72 |
+
# in pip._internal.utils.encoding.auto_decode
|
| 73 |
+
try:
|
| 74 |
+
locale.setlocale(locale.LC_ALL, "")
|
| 75 |
+
except locale.Error as e:
|
| 76 |
+
# setlocale can apparently crash if locale are uninitialized
|
| 77 |
+
logger.debug("Ignoring error %s when setting locale", e)
|
| 78 |
+
command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
|
| 79 |
+
|
| 80 |
+
return command.main(cmd_args)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/main_parser.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A single place for constructing and exposing the main parser
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import os
|
| 5 |
+
import subprocess
|
| 6 |
+
import sys
|
| 7 |
+
from typing import List, Optional, Tuple
|
| 8 |
+
|
| 9 |
+
from pip._internal.build_env import get_runnable_pip
|
| 10 |
+
from pip._internal.cli import cmdoptions
|
| 11 |
+
from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
|
| 12 |
+
from pip._internal.commands import commands_dict, get_similar_commands
|
| 13 |
+
from pip._internal.exceptions import CommandError
|
| 14 |
+
from pip._internal.utils.misc import get_pip_version, get_prog
|
| 15 |
+
|
| 16 |
+
__all__ = ["create_main_parser", "parse_command"]
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def create_main_parser() -> ConfigOptionParser:
|
| 20 |
+
"""Creates and returns the main parser for pip's CLI"""
|
| 21 |
+
|
| 22 |
+
parser = ConfigOptionParser(
|
| 23 |
+
usage="\n%prog <command> [options]",
|
| 24 |
+
add_help_option=False,
|
| 25 |
+
formatter=UpdatingDefaultsHelpFormatter(),
|
| 26 |
+
name="global",
|
| 27 |
+
prog=get_prog(),
|
| 28 |
+
)
|
| 29 |
+
parser.disable_interspersed_args()
|
| 30 |
+
|
| 31 |
+
parser.version = get_pip_version()
|
| 32 |
+
|
| 33 |
+
# add the general options
|
| 34 |
+
gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
|
| 35 |
+
parser.add_option_group(gen_opts)
|
| 36 |
+
|
| 37 |
+
# so the help formatter knows
|
| 38 |
+
parser.main = True # type: ignore
|
| 39 |
+
|
| 40 |
+
# create command listing for description
|
| 41 |
+
description = [""] + [
|
| 42 |
+
f"{name:27} {command_info.summary}"
|
| 43 |
+
for name, command_info in commands_dict.items()
|
| 44 |
+
]
|
| 45 |
+
parser.description = "\n".join(description)
|
| 46 |
+
|
| 47 |
+
return parser
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def identify_python_interpreter(python: str) -> Optional[str]:
|
| 51 |
+
# If the named file exists, use it.
|
| 52 |
+
# If it's a directory, assume it's a virtual environment and
|
| 53 |
+
# look for the environment's Python executable.
|
| 54 |
+
if os.path.exists(python):
|
| 55 |
+
if os.path.isdir(python):
|
| 56 |
+
# bin/python for Unix, Scripts/python.exe for Windows
|
| 57 |
+
# Try both in case of odd cases like cygwin.
|
| 58 |
+
for exe in ("bin/python", "Scripts/python.exe"):
|
| 59 |
+
py = os.path.join(python, exe)
|
| 60 |
+
if os.path.exists(py):
|
| 61 |
+
return py
|
| 62 |
+
else:
|
| 63 |
+
return python
|
| 64 |
+
|
| 65 |
+
# Could not find the interpreter specified
|
| 66 |
+
return None
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def parse_command(args: List[str]) -> Tuple[str, List[str]]:
|
| 70 |
+
parser = create_main_parser()
|
| 71 |
+
|
| 72 |
+
# Note: parser calls disable_interspersed_args(), so the result of this
|
| 73 |
+
# call is to split the initial args into the general options before the
|
| 74 |
+
# subcommand and everything else.
|
| 75 |
+
# For example:
|
| 76 |
+
# args: ['--timeout=5', 'install', '--user', 'INITools']
|
| 77 |
+
# general_options: ['--timeout==5']
|
| 78 |
+
# args_else: ['install', '--user', 'INITools']
|
| 79 |
+
general_options, args_else = parser.parse_args(args)
|
| 80 |
+
|
| 81 |
+
# --python
|
| 82 |
+
if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
|
| 83 |
+
# Re-invoke pip using the specified Python interpreter
|
| 84 |
+
interpreter = identify_python_interpreter(general_options.python)
|
| 85 |
+
if interpreter is None:
|
| 86 |
+
raise CommandError(
|
| 87 |
+
f"Could not locate Python interpreter {general_options.python}"
|
| 88 |
+
)
|
| 89 |
+
|
| 90 |
+
pip_cmd = [
|
| 91 |
+
interpreter,
|
| 92 |
+
get_runnable_pip(),
|
| 93 |
+
]
|
| 94 |
+
pip_cmd.extend(args)
|
| 95 |
+
|
| 96 |
+
# Set a flag so the child doesn't re-invoke itself, causing
|
| 97 |
+
# an infinite loop.
|
| 98 |
+
os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
|
| 99 |
+
returncode = 0
|
| 100 |
+
try:
|
| 101 |
+
proc = subprocess.run(pip_cmd)
|
| 102 |
+
returncode = proc.returncode
|
| 103 |
+
except (subprocess.SubprocessError, OSError) as exc:
|
| 104 |
+
raise CommandError(f"Failed to run pip under {interpreter}: {exc}")
|
| 105 |
+
sys.exit(returncode)
|
| 106 |
+
|
| 107 |
+
# --version
|
| 108 |
+
if general_options.version:
|
| 109 |
+
sys.stdout.write(parser.version)
|
| 110 |
+
sys.stdout.write(os.linesep)
|
| 111 |
+
sys.exit()
|
| 112 |
+
|
| 113 |
+
# pip || pip help -> print_help()
|
| 114 |
+
if not args_else or (args_else[0] == "help" and len(args_else) == 1):
|
| 115 |
+
parser.print_help()
|
| 116 |
+
sys.exit()
|
| 117 |
+
|
| 118 |
+
# the subcommand name
|
| 119 |
+
cmd_name = args_else[0]
|
| 120 |
+
|
| 121 |
+
if cmd_name not in commands_dict:
|
| 122 |
+
guess = get_similar_commands(cmd_name)
|
| 123 |
+
|
| 124 |
+
msg = [f'unknown command "{cmd_name}"']
|
| 125 |
+
if guess:
|
| 126 |
+
msg.append(f'maybe you meant "{guess}"')
|
| 127 |
+
|
| 128 |
+
raise CommandError(" - ".join(msg))
|
| 129 |
+
|
| 130 |
+
# all the args without the subcommand
|
| 131 |
+
cmd_args = args[:]
|
| 132 |
+
cmd_args.remove(cmd_name)
|
| 133 |
+
|
| 134 |
+
return cmd_name, cmd_args
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/cli/status_codes.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
SUCCESS = 0
|
| 2 |
+
ERROR = 1
|
| 3 |
+
UNKNOWN_ERROR = 2
|
| 4 |
+
VIRTUALENV_NOT_FOUND = 3
|
| 5 |
+
PREVIOUS_BUILD_DIR_ERROR = 4
|
| 6 |
+
NO_MATCHES_FOUND = 23
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (1.06 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc
ADDED
|
Binary file (3.2 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc
ADDED
|
Binary file (18.2 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/base.py
ADDED
|
@@ -0,0 +1,688 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import csv
|
| 2 |
+
import email.message
|
| 3 |
+
import functools
|
| 4 |
+
import json
|
| 5 |
+
import logging
|
| 6 |
+
import pathlib
|
| 7 |
+
import re
|
| 8 |
+
import zipfile
|
| 9 |
+
from typing import (
|
| 10 |
+
IO,
|
| 11 |
+
Any,
|
| 12 |
+
Collection,
|
| 13 |
+
Container,
|
| 14 |
+
Dict,
|
| 15 |
+
Iterable,
|
| 16 |
+
Iterator,
|
| 17 |
+
List,
|
| 18 |
+
NamedTuple,
|
| 19 |
+
Optional,
|
| 20 |
+
Protocol,
|
| 21 |
+
Tuple,
|
| 22 |
+
Union,
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
from pip._vendor.packaging.requirements import Requirement
|
| 26 |
+
from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
|
| 27 |
+
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
| 28 |
+
from pip._vendor.packaging.version import Version
|
| 29 |
+
|
| 30 |
+
from pip._internal.exceptions import NoneMetadataError
|
| 31 |
+
from pip._internal.locations import site_packages, user_site
|
| 32 |
+
from pip._internal.models.direct_url import (
|
| 33 |
+
DIRECT_URL_METADATA_NAME,
|
| 34 |
+
DirectUrl,
|
| 35 |
+
DirectUrlValidationError,
|
| 36 |
+
)
|
| 37 |
+
from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here.
|
| 38 |
+
from pip._internal.utils.egg_link import egg_link_path_from_sys_path
|
| 39 |
+
from pip._internal.utils.misc import is_local, normalize_path
|
| 40 |
+
from pip._internal.utils.urls import url_to_path
|
| 41 |
+
|
| 42 |
+
from ._json import msg_to_json
|
| 43 |
+
|
| 44 |
+
# A path inside a distribution's metadata directory, accepted either as a
# plain string or a pure path (no filesystem access implied).
InfoPath = Union[str, pathlib.PurePath]

logger = logging.getLogger(__name__)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class BaseEntryPoint(Protocol):
    """Protocol for one entry point (console script, GUI script, or plugin)."""

    @property
    def name(self) -> str:
        raise NotImplementedError()

    @property
    def value(self) -> str:
        raise NotImplementedError()

    @property
    def group(self) -> str:
        raise NotImplementedError()
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def _convert_installed_files_path(
|
| 64 |
+
entry: Tuple[str, ...],
|
| 65 |
+
info: Tuple[str, ...],
|
| 66 |
+
) -> str:
|
| 67 |
+
"""Convert a legacy installed-files.txt path into modern RECORD path.
|
| 68 |
+
|
| 69 |
+
The legacy format stores paths relative to the info directory, while the
|
| 70 |
+
modern format stores paths relative to the package root, e.g. the
|
| 71 |
+
site-packages directory.
|
| 72 |
+
|
| 73 |
+
:param entry: Path parts of the installed-files.txt entry.
|
| 74 |
+
:param info: Path parts of the egg-info directory relative to package root.
|
| 75 |
+
:returns: The converted entry.
|
| 76 |
+
|
| 77 |
+
For best compatibility with symlinks, this does not use ``abspath()`` or
|
| 78 |
+
``Path.resolve()``, but tries to work with path parts:
|
| 79 |
+
|
| 80 |
+
1. While ``entry`` starts with ``..``, remove the equal amounts of parts
|
| 81 |
+
from ``info``; if ``info`` is empty, start appending ``..`` instead.
|
| 82 |
+
2. Join the two directly.
|
| 83 |
+
"""
|
| 84 |
+
while entry and entry[0] == "..":
|
| 85 |
+
if not info or info[-1] == "..":
|
| 86 |
+
info += ("..",)
|
| 87 |
+
else:
|
| 88 |
+
info = info[:-1]
|
| 89 |
+
entry = entry[1:]
|
| 90 |
+
return str(pathlib.Path(*info, *entry))
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
class RequiresEntry(NamedTuple):
    """One dependency line parsed from an egg-info ``requires.txt``."""

    requirement: str  # Requirement string as written (no environment marker).
    extra: str  # Extra name from the enclosing section header, or "".
    marker: str  # Environment marker from the section header, or "".
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class BaseDistribution(Protocol):
    """Protocol describing a single installed (or loadable) distribution.

    Concrete implementations are provided by pip's metadata backends; callers
    should rely only on this interface.
    """

    @classmethod
    def from_directory(cls, directory: str) -> "BaseDistribution":
        """Load the distribution from a metadata directory.

        :param directory: Path to a metadata directory, e.g. ``.dist-info``.
        """
        raise NotImplementedError()

    @classmethod
    def from_metadata_file_contents(
        cls,
        metadata_contents: bytes,
        filename: str,
        project_name: str,
    ) -> "BaseDistribution":
        """Load the distribution from the contents of a METADATA file.

        This is used to implement PEP 658 by generating a "shallow" dist object that can
        be used for resolution without downloading or building the actual dist yet.

        :param metadata_contents: The contents of a METADATA file.
        :param filename: File name for the dist with this metadata.
        :param project_name: Name of the project this dist represents.
        """
        raise NotImplementedError()

    @classmethod
    def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution":
        """Load the distribution from a given wheel.

        :param wheel: A concrete wheel definition.
        :param name: File name of the wheel.

        :raises InvalidWheel: Whenever loading of the wheel causes a
            :py:exc:`zipfile.BadZipFile` exception to be thrown.
        :raises UnsupportedWheel: If the wheel is a valid zip, but malformed
            internally.
        """
        raise NotImplementedError()

    def __repr__(self) -> str:
        # Debug-oriented form; also includes where the dist was loaded from.
        return f"{self.raw_name} {self.raw_version} ({self.location})"

    def __str__(self) -> str:
        # User-facing form: "<name> <version>".
        return f"{self.raw_name} {self.raw_version}"

    @property
    def location(self) -> Optional[str]:
        """Where the distribution is loaded from.

        A string value is not necessarily a filesystem path, since distributions
        can be loaded from other sources, e.g. arbitrary zip archives. ``None``
        means the distribution is created in-memory.

        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
        this is a symbolic link, we want to preserve the relative path between
        it and files in the distribution.
        """
        raise NotImplementedError()

    @property
    def editable_project_location(self) -> Optional[str]:
        """The project location for editable distributions.

        This is the directory where pyproject.toml or setup.py is located.
        None if the distribution is not installed in editable mode.
        """
        # TODO: this property is relatively costly to compute, memoize it ?
        direct_url = self.direct_url
        if direct_url:
            if direct_url.is_local_editable():
                return url_to_path(direct_url.url)
        else:
            # Search for an .egg-link file by walking sys.path, as it was
            # done before by dist_is_editable().
            egg_link_path = egg_link_path_from_sys_path(self.raw_name)
            if egg_link_path:
                # TODO: get project location from second line of egg_link file
                # (https://github.com/pypa/pip/issues/10243)
                return self.location
        return None

    @property
    def installed_location(self) -> Optional[str]:
        """The distribution's "installed" location.

        This should generally be a ``site-packages`` directory. This is
        usually ``dist.location``, except for legacy develop-installed packages,
        where ``dist.location`` is the source code location, and this is where
        the ``.egg-link`` file is.

        The returned location is normalized (in particular, with symlinks removed).
        """
        raise NotImplementedError()

    @property
    def info_location(self) -> Optional[str]:
        """Location of the .[egg|dist]-info directory or file.

        Similarly to ``location``, a string value is not necessarily a
        filesystem path. ``None`` means the distribution is created in-memory.

        For a modern .dist-info installation on disk, this should be something
        like ``{location}/{raw_name}-{version}.dist-info``.

        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
        this is a symbolic link, we want to preserve the relative path between
        it and other files in the distribution.
        """
        raise NotImplementedError()

    @property
    def installed_by_distutils(self) -> bool:
        """Whether this distribution is installed with legacy distutils format.

        A distribution installed with "raw" distutils not patched by setuptools
        uses one single file at ``info_location`` to store metadata. We need to
        treat this specially on uninstallation.
        """
        info_location = self.info_location
        if not info_location:
            return False
        # distutils stores metadata as a single *file*; directories indicate
        # setuptools egg-info or dist-info installs.
        return pathlib.Path(info_location).is_file()

    @property
    def installed_as_egg(self) -> bool:
        """Whether this distribution is installed as an egg.

        This usually indicates the distribution was installed by (older versions
        of) easy_install.
        """
        location = self.location
        if not location:
            return False
        return location.endswith(".egg")

    @property
    def installed_with_setuptools_egg_info(self) -> bool:
        """Whether this distribution is installed with the ``.egg-info`` format.

        This usually indicates the distribution was installed with setuptools
        with an old pip version or with ``single-version-externally-managed``.

        Note that this ensure the metadata store is a directory. distutils can
        also installs an ``.egg-info``, but as a file, not a directory. This
        property is *False* for that case. Also see ``installed_by_distutils``.
        """
        info_location = self.info_location
        if not info_location:
            return False
        if not info_location.endswith(".egg-info"):
            return False
        return pathlib.Path(info_location).is_dir()

    @property
    def installed_with_dist_info(self) -> bool:
        """Whether this distribution is installed with the "modern format".

        This indicates a "modern" installation, e.g. storing metadata in the
        ``.dist-info`` directory. This applies to installations made by
        setuptools (but through pip, not directly), or anything using the
        standardized build backend interface (PEP 517).
        """
        info_location = self.info_location
        if not info_location:
            return False
        if not info_location.endswith(".dist-info"):
            return False
        return pathlib.Path(info_location).is_dir()

    @property
    def canonical_name(self) -> NormalizedName:
        """The project's normalized (PEP 503) name."""
        raise NotImplementedError()

    @property
    def version(self) -> Version:
        """The distribution's parsed version."""
        raise NotImplementedError()

    @property
    def raw_version(self) -> str:
        """The version string exactly as recorded in the metadata."""
        raise NotImplementedError()

    @property
    def setuptools_filename(self) -> str:
        """Convert a project name to its setuptools-compatible filename.

        This is a copy of ``pkg_resources.to_filename()`` for compatibility.
        """
        return self.raw_name.replace("-", "_")

    @property
    def direct_url(self) -> Optional[DirectUrl]:
        """Obtain a DirectUrl from this distribution.

        Returns None if the distribution has no `direct_url.json` metadata,
        or if `direct_url.json` is invalid.
        """
        try:
            content = self.read_text(DIRECT_URL_METADATA_NAME)
        except FileNotFoundError:
            return None
        try:
            return DirectUrl.from_json(content)
        except (
            UnicodeDecodeError,
            json.JSONDecodeError,
            DirectUrlValidationError,
        ) as e:
            # Invalid direct_url.json is treated as "no direct URL", but we
            # still surface the problem in the log.
            logger.warning(
                "Error parsing %s for %s: %s",
                DIRECT_URL_METADATA_NAME,
                self.canonical_name,
                e,
            )
            return None

    @property
    def installer(self) -> str:
        """First non-blank line of the INSTALLER file, or "" if unreadable."""
        try:
            installer_text = self.read_text("INSTALLER")
        except (OSError, ValueError, NoneMetadataError):
            return ""  # Fail silently if the installer file cannot be read.
        for line in installer_text.splitlines():
            cleaned_line = line.strip()
            if cleaned_line:
                return cleaned_line
        return ""

    @property
    def requested(self) -> bool:
        # A REQUESTED marker file (PEP 376) means the distribution was
        # explicitly asked for rather than pulled in as a dependency.
        return self.is_file("REQUESTED")

    @property
    def editable(self) -> bool:
        # Editable iff an editable project location can be resolved.
        return bool(self.editable_project_location)

    @property
    def local(self) -> bool:
        """If distribution is installed in the current virtual environment.

        Always True if we're not in a virtualenv.
        """
        if self.installed_location is None:
            return False
        return is_local(self.installed_location)

    @property
    def in_usersite(self) -> bool:
        """Whether the distribution lives under the per-user site directory."""
        if self.installed_location is None or user_site is None:
            return False
        return self.installed_location.startswith(normalize_path(user_site))

    @property
    def in_site_packages(self) -> bool:
        """Whether the distribution lives under the global site-packages."""
        if self.installed_location is None or site_packages is None:
            return False
        return self.installed_location.startswith(normalize_path(site_packages))

    def is_file(self, path: InfoPath) -> bool:
        """Check whether an entry in the info directory is a file."""
        raise NotImplementedError()

    def iter_distutils_script_names(self) -> Iterator[str]:
        """Find distutils 'scripts' entries metadata.

        If 'scripts' is supplied in ``setup.py``, distutils records those in the
        installed distribution's ``scripts`` directory, a file for each script.
        """
        raise NotImplementedError()

    def read_text(self, path: InfoPath) -> str:
        """Read a file in the info directory.

        :raise FileNotFoundError: If ``path`` does not exist in the directory.
        :raise NoneMetadataError: If ``path`` exists in the info directory, but
            cannot be read.
        """
        raise NotImplementedError()

    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
        """Iterate over the entry points declared by this distribution."""
        raise NotImplementedError()

    def _metadata_impl(self) -> email.message.Message:
        """Backend hook: parse and return the raw metadata message."""
        raise NotImplementedError()

    @functools.cached_property
    def metadata(self) -> email.message.Message:
        """Metadata of distribution parsed from e.g. METADATA or PKG-INFO.

        This should return an empty message if the metadata file is unavailable.

        :raises NoneMetadataError: If the metadata file is available, but does
            not contain valid metadata.
        """
        metadata = self._metadata_impl()
        self._add_egg_info_requires(metadata)
        return metadata

    @property
    def metadata_dict(self) -> Dict[str, Any]:
        """PEP 566 compliant JSON-serializable representation of METADATA or PKG-INFO.

        This should return an empty dict if the metadata file is unavailable.

        :raises NoneMetadataError: If the metadata file is available, but does
            not contain valid metadata.
        """
        return msg_to_json(self.metadata)

    @property
    def metadata_version(self) -> Optional[str]:
        """Value of "Metadata-Version:" in distribution metadata, if available."""
        return self.metadata.get("Metadata-Version")

    @property
    def raw_name(self) -> str:
        """Value of "Name:" in distribution metadata."""
        # The metadata should NEVER be missing the Name: key, but if it somehow
        # does, fall back to the known canonical name.
        return self.metadata.get("Name", self.canonical_name)

    @property
    def requires_python(self) -> SpecifierSet:
        """Value of "Requires-Python:" in distribution metadata.

        If the key does not exist or contains an invalid value, an empty
        SpecifierSet should be returned.
        """
        value = self.metadata.get("Requires-Python")
        if value is None:
            return SpecifierSet()
        try:
            # Convert to str to satisfy the type checker; this can be a Header object.
            spec = SpecifierSet(str(value))
        except InvalidSpecifier as e:
            message = "Package %r has an invalid Requires-Python: %s"
            logger.warning(message, self.raw_name, e)
            return SpecifierSet()
        return spec

    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
        """Dependencies of this distribution.

        For modern .dist-info distributions, this is the collection of
        "Requires-Dist:" entries in distribution metadata.
        """
        raise NotImplementedError()

    def iter_raw_dependencies(self) -> Iterable[str]:
        """Raw Requires-Dist metadata."""
        return self.metadata.get_all("Requires-Dist", [])

    def iter_provided_extras(self) -> Iterable[NormalizedName]:
        """Extras provided by this distribution.

        For modern .dist-info distributions, this is the collection of
        "Provides-Extra:" entries in distribution metadata.

        The return value of this function is expected to be normalised names,
        per PEP 685, with the returned value being handled appropriately by
        `iter_dependencies`.
        """
        raise NotImplementedError()

    def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]:
        """Yield file entries listed in RECORD, or None if RECORD is absent."""
        try:
            text = self.read_text("RECORD")
        except FileNotFoundError:
            return None
        # This extra Path-str cast normalizes entries.
        return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines()))

    def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]:
        """Yield entries from installed-files.txt, or None if it is absent.

        Entries are rewritten (via ``_convert_installed_files_path``) to be
        relative to the package root, matching the RECORD convention, when
        both ``location`` and ``info_location`` are known.
        """
        try:
            text = self.read_text("installed-files.txt")
        except FileNotFoundError:
            return None
        paths = (p for p in text.splitlines(keepends=False) if p)
        root = self.location
        info = self.info_location
        if root is None or info is None:
            return paths
        try:
            info_rel = pathlib.Path(info).relative_to(root)
        except ValueError:  # info is not relative to root.
            return paths
        if not info_rel.parts:  # info *is* root.
            return paths
        return (
            _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts)
            for p in paths
        )

    def iter_declared_entries(self) -> Optional[Iterator[str]]:
        """Iterate through file entries declared in this distribution.

        For modern .dist-info distributions, this is the files listed in the
        ``RECORD`` metadata file. For legacy setuptools distributions, this
        comes from ``installed-files.txt``, with entries normalized to be
        compatible with the format used by ``RECORD``.

        :return: An iterator for listed entries, or None if the distribution
            contains neither ``RECORD`` nor ``installed-files.txt``.
        """
        return (
            self._iter_declared_entries_from_record()
            or self._iter_declared_entries_from_legacy()
        )

    def _iter_requires_txt_entries(self) -> Iterator[RequiresEntry]:
        """Parse a ``requires.txt`` in an egg-info directory.

        This is an INI-ish format where an egg-info stores dependencies. A
        section name describes extra other environment markers, while each entry
        is an arbitrary string (not a key-value pair) representing a dependency
        as a requirement string (no markers).

        There is a construct in ``importlib.metadata`` called ``Sectioned`` that
        does mostly the same, but the format is currently considered private.
        """
        try:
            content = self.read_text("requires.txt")
        except FileNotFoundError:
            return
        extra = marker = ""  # Section-less entries don't have markers.
        for line in content.splitlines():
            line = line.strip()
            if not line or line.startswith("#"):  # Comment; ignored.
                continue
            if line.startswith("[") and line.endswith("]"):  # A section header.
                extra, _, marker = line.strip("[]").partition(":")
                continue
            yield RequiresEntry(requirement=line, extra=extra, marker=marker)

    def _iter_egg_info_extras(self) -> Iterable[str]:
        """Get extras from the egg-info directory."""
        # "" seeds the set so the section-less (no-extra) entries are skipped.
        known_extras = {""}
        for entry in self._iter_requires_txt_entries():
            extra = canonicalize_name(entry.extra)
            if extra in known_extras:
                continue
            known_extras.add(extra)
            yield extra

    def _iter_egg_info_dependencies(self) -> Iterable[str]:
        """Get distribution dependencies from the egg-info directory.

        To ease parsing, this converts a legacy dependency entry into a PEP 508
        requirement string. Like ``_iter_requires_txt_entries()``, there is code
        in ``importlib.metadata`` that does mostly the same, but not do exactly
        what we need.

        Namely, ``importlib.metadata`` does not normalize the extra name before
        putting it into the requirement string, which causes marker comparison
        to fail because the dist-info format do normalize. This is consistent in
        all currently available PEP 517 backends, although not standardized.
        """
        for entry in self._iter_requires_txt_entries():
            extra = canonicalize_name(entry.extra)
            if extra and entry.marker:
                marker = f'({entry.marker}) and extra == "{extra}"'
            elif extra:
                marker = f'extra == "{extra}"'
            elif entry.marker:
                marker = entry.marker
            else:
                marker = ""
            if marker:
                yield f"{entry.requirement} ; {marker}"
            else:
                yield entry.requirement

    def _add_egg_info_requires(self, metadata: email.message.Message) -> None:
        """Add egg-info requires.txt information to the metadata."""
        # Only fill in values the metadata does not already carry, so real
        # dist-info metadata always wins over the legacy requires.txt.
        if not metadata.get_all("Requires-Dist"):
            for dep in self._iter_egg_info_dependencies():
                metadata["Requires-Dist"] = dep
        if not metadata.get_all("Provides-Extra"):
            for extra in self._iter_egg_info_extras():
                metadata["Provides-Extra"] = extra
|
| 580 |
+
|
| 581 |
+
|
| 582 |
+
class BaseEnvironment:
    """An environment containing distributions to introspect."""

    @classmethod
    def default(cls) -> "BaseEnvironment":
        # Environment representing the running interpreter's sys.path.
        raise NotImplementedError()

    @classmethod
    def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment":
        # Environment restricted to the given search paths (None = default).
        raise NotImplementedError()

    def get_distribution(self, name: str) -> Optional["BaseDistribution"]:
        """Given a requirement name, return the installed distributions.

        The name may not be normalized. The implementation must canonicalize
        it for lookup.
        """
        raise NotImplementedError()

    def _iter_distributions(self) -> Iterator["BaseDistribution"]:
        """Iterate through installed distributions.

        This function should be implemented by subclass, but never called
        directly. Use the public ``iter_distribution()`` instead, which
        implements additional logic to make sure the distributions are valid.
        """
        raise NotImplementedError()

    def iter_all_distributions(self) -> Iterator[BaseDistribution]:
        """Iterate through all installed distributions without any filtering."""
        for dist in self._iter_distributions():
            # Make sure the distribution actually comes from a valid Python
            # packaging distribution. Pip's AdjacentTempDirectory leaves folders
            # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The
            # valid project name pattern is taken from PEP 508.
            project_name_valid = re.match(
                r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$",
                dist.canonical_name,
                flags=re.IGNORECASE,
            )
            if not project_name_valid:
                logger.warning(
                    "Ignoring invalid distribution %s (%s)",
                    dist.canonical_name,
                    dist.location,
                )
                continue
            yield dist

    def iter_installed_distributions(
        self,
        local_only: bool = True,
        skip: Container[str] = stdlib_pkgs,
        include_editables: bool = True,
        editables_only: bool = False,
        user_only: bool = False,
    ) -> Iterator[BaseDistribution]:
        """Return a list of installed distributions.

        This is based on ``iter_all_distributions()`` with additional filtering
        options. Note that ``iter_installed_distributions()`` without arguments
        is *not* equal to ``iter_all_distributions()``, since some of the
        configurations exclude packages by default.

        :param local_only: If True (default), only return installations
            local to the current virtualenv, if in a virtualenv.
        :param skip: An iterable of canonicalized project names to ignore;
            defaults to ``stdlib_pkgs``.
        :param include_editables: If False, don't report editables.
        :param editables_only: If True, only report editables.
        :param user_only: If True, only report installations in the user
            site directory.
        """
        # Filters are composed as lazy generator stages over one iterator.
        it = self.iter_all_distributions()
        if local_only:
            it = (d for d in it if d.local)
        if not include_editables:
            it = (d for d in it if not d.editable)
        if editables_only:
            it = (d for d in it if d.editable)
        if user_only:
            it = (d for d in it if d.in_usersite)
        return (d for d in it if d.canonical_name not in skip)
|
| 665 |
+
|
| 666 |
+
|
| 667 |
+
class Wheel(Protocol):
    """Protocol for a wheel, regardless of where its bytes are stored."""

    # Where the wheel comes from (path or URL); used for display purposes.
    location: str

    def as_zipfile(self) -> zipfile.ZipFile:
        raise NotImplementedError()
|
| 672 |
+
|
| 673 |
+
|
| 674 |
+
class FilesystemWheel(Wheel):
    """A wheel that exists as a regular file on the local filesystem."""

    def __init__(self, location: str) -> None:
        # Path to the .whl file on disk.
        self.location = location

    def as_zipfile(self) -> zipfile.ZipFile:
        """Open the wheel file as a ZIP archive (Zip64 enabled for big wheels)."""
        return zipfile.ZipFile(self.location, allowZip64=True)
|
| 680 |
+
|
| 681 |
+
|
| 682 |
+
class MemoryWheel(Wheel):
|
| 683 |
+
def __init__(self, location: str, stream: IO[bytes]) -> None:
|
| 684 |
+
self.location = location
|
| 685 |
+
self.stream = stream
|
| 686 |
+
|
| 687 |
+
def as_zipfile(self) -> zipfile.ZipFile:
|
| 688 |
+
return zipfile.ZipFile(self.stream, allowZip64=True)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__init__.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from ._dists import Distribution
from ._envs import Environment

__all__ = ["NAME", "Distribution", "Environment"]

# Identifier for this metadata backend (the importlib.metadata-based one).
NAME = "importlib"
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (416 Bytes). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc
ADDED
|
Binary file (14.1 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc
ADDED
|
Binary file (12.5 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_compat.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib.metadata
|
| 2 |
+
import os
|
| 3 |
+
from typing import Any, Optional, Protocol, Tuple, cast
|
| 4 |
+
|
| 5 |
+
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class BadMetadata(ValueError):
    """Raised when a distribution's metadata cannot be used.

    Carries the offending distribution object and a human-readable reason.
    """

    def __init__(self, dist: importlib.metadata.Distribution, *, reason: str) -> None:
        self.dist = dist
        self.reason = reason

    def __str__(self) -> str:
        return "Bad metadata in {0.dist} ({0.reason})".format(self)
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class BasePath(Protocol):
    """A protocol that various path objects conform.

    This exists because importlib.metadata uses both ``pathlib.Path`` and
    ``zipfile.Path``, and we need a common base for type hints (Union does not
    work well since ``zipfile.Path`` is too new for our linter setup).

    This does not mean to be exhaustive, but only contains things that present
    in both classes *that we need*.
    """

    @property
    def name(self) -> str:
        # Final path component, e.g. "foo-1.0.dist-info".
        raise NotImplementedError()

    @property
    def parent(self) -> "BasePath":
        # The containing directory of this path.
        raise NotImplementedError()
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def get_info_location(d: importlib.metadata.Distribution) -> Optional[BasePath]:
    """Find the path to the distribution's metadata directory.

    HACK: This relies on importlib.metadata's private ``_path`` attribute. Not
    all distributions exist on disk, so importlib.metadata is correct to not
    expose the attribute as public. But pip's code base is old and not as clean,
    so we do this to avoid having to rewrite too many things. Hopefully we can
    eliminate this some day.
    """
    # EAFP form of ``getattr(d, "_path", None)``.
    try:
        return d._path
    except AttributeError:
        return None
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def parse_name_and_version_from_info_directory(
    dist: importlib.metadata.Distribution,
) -> Tuple[Optional[str], Optional[str]]:
    """Get a name and version from the metadata directory name.

    This is much faster than reading distribution metadata.
    """
    info_location = get_info_location(dist)
    if info_location is None:
        return None, None

    stem, suffix = os.path.splitext(info_location.name)
    if suffix == ".dist-info":
        # "<name>-<version>.dist-info" per the wheel layout; a stem without
        # a dash cannot be parsed and falls through to the final return.
        name, sep, version = stem.partition("-")
        return (name, version) if sep else (None, None)
    if suffix == ".egg-info":
        # Legacy layout only encodes the project name reliably.
        return stem.split("-", 1)[0], None
    return None, None
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def get_dist_canonical_name(dist: importlib.metadata.Distribution) -> NormalizedName:
    """Get the distribution's normalized name.

    The ``name`` attribute is only available in Python 3.10 or later. We are
    targeting exactly that, but Mypy does not know this.

    :raises BadMetadata: if the metadata-provided name is not a string.
    """
    # Prefer the (fast) directory-name parse over reading actual metadata.
    if name := parse_name_and_version_from_info_directory(dist)[0]:
        return canonicalize_name(name)

    # Fall back to the metadata-backed attribute; cast silences Mypy on 3.9.
    name = cast(Any, dist).name
    if not isinstance(name, str):
        raise BadMetadata(dist, reason="invalid metadata entry 'name'")
    return canonicalize_name(name)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_envs.py
ADDED
|
@@ -0,0 +1,189 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import importlib.metadata
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
import pathlib
|
| 6 |
+
import sys
|
| 7 |
+
import zipfile
|
| 8 |
+
import zipimport
|
| 9 |
+
from typing import Iterator, List, Optional, Sequence, Set, Tuple
|
| 10 |
+
|
| 11 |
+
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
| 12 |
+
|
| 13 |
+
from pip._internal.metadata.base import BaseDistribution, BaseEnvironment
|
| 14 |
+
from pip._internal.models.wheel import Wheel
|
| 15 |
+
from pip._internal.utils.deprecation import deprecated
|
| 16 |
+
from pip._internal.utils.filetypes import WHEEL_EXTENSION
|
| 17 |
+
|
| 18 |
+
from ._compat import BadMetadata, BasePath, get_dist_canonical_name, get_info_location
|
| 19 |
+
from ._dists import Distribution
|
| 20 |
+
|
| 21 |
+
logger = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def _looks_like_wheel(location: str) -> bool:
    """Return whether ``location`` plausibly points at a valid wheel file.

    Checks extension, file existence, the wheel filename pattern, and that
    the file really is a ZIP archive — short-circuiting cheapest-first.
    """
    return (
        location.endswith(WHEEL_EXTENSION)
        and os.path.isfile(location)
        and bool(Wheel.wheel_file_re.match(os.path.basename(location)))
        and zipfile.is_zipfile(location)
    )
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class _DistributionFinder:
    """Finder to locate distributions.

    The main purpose of this class is to memoize found distributions' names, so
    only one distribution is returned for each package name. A lot of pip code
    assumes this (because it is setuptools's behavior), and not doing the same
    can potentially cause a distribution in lower precedence path to override a
    higher precedence one if the caller is not careful.

    Eventually we probably want to make it possible to see lower precedence
    installations as well. It's a useful feature, after all.
    """

    # (distribution, path of its metadata directory — None when not on disk)
    FoundResult = Tuple[importlib.metadata.Distribution, Optional[BasePath]]

    def __init__(self) -> None:
        # Canonical names already yielded; used to enforce first-found-wins.
        self._found_names: Set[NormalizedName] = set()

    def _find_impl(self, location: str) -> Iterator[FoundResult]:
        """Find distributions in a location."""
        # Skip looking inside a wheel. Since a package inside a wheel is not
        # always valid (due to .data directories etc.), its .dist-info entry
        # should not be considered an installed distribution.
        if _looks_like_wheel(location):
            return
        # To know exactly where we find a distribution, we have to feed in the
        # paths one by one, instead of dumping the list to importlib.metadata.
        for dist in importlib.metadata.distributions(path=[location]):
            info_location = get_info_location(dist)
            try:
                name = get_dist_canonical_name(dist)
            except BadMetadata as e:
                # Unusable metadata entry: warn and keep scanning.
                logger.warning("Skipping %s due to %s", info_location, e.reason)
                continue
            if name in self._found_names:
                # A higher-precedence path already provided this project.
                continue
            self._found_names.add(name)
            yield dist, info_location

    def find(self, location: str) -> Iterator[BaseDistribution]:
        """Find distributions in a location.

        The path can be either a directory, or a ZIP archive.
        """
        for dist, info_location in self._find_impl(location):
            if info_location is None:
                installed_location: Optional[BasePath] = None
            else:
                # The install root is the directory containing the metadata dir.
                installed_location = info_location.parent
            yield Distribution(dist, info_location, installed_location)

    def find_linked(self, location: str) -> Iterator[BaseDistribution]:
        """Read location in egg-link files and return distributions in there.

        The path should be a directory; otherwise this returns nothing. This
        follows how setuptools does this for compatibility. The first non-empty
        line in the egg-link is read as a path (resolved against the egg-link's
        containing directory if relative). Distributions found at that linked
        location are returned.
        """
        path = pathlib.Path(location)
        if not path.is_dir():
            return
        for child in path.iterdir():
            if child.suffix != ".egg-link":
                continue
            with child.open() as f:
                # First non-empty (stripped) line is the link target.
                lines = (line.strip() for line in f)
                target_rel = next((line for line in lines if line), "")
            if not target_rel:
                continue
            # joinpath resolves a relative target against the egg-link's dir.
            target_location = str(path.joinpath(target_rel))
            for dist, info_location in self._find_impl(target_location):
                yield Distribution(dist, info_location, path)

    def _find_eggs_in_dir(self, location: str) -> Iterator[BaseDistribution]:
        # Imported locally to defer the (slow) pkg_resources import until an
        # egg is actually encountered.
        from pip._vendor.pkg_resources import find_distributions

        from pip._internal.metadata import pkg_resources as legacy

        with os.scandir(location) as it:
            for entry in it:
                if not entry.name.endswith(".egg"):
                    continue
                for dist in find_distributions(entry.path):
                    yield legacy.Distribution(dist)

    def _find_eggs_in_zip(self, location: str) -> Iterator[BaseDistribution]:
        # Local imports for the same deferred-cost reason as above.
        from pip._vendor.pkg_resources import find_eggs_in_zip

        from pip._internal.metadata import pkg_resources as legacy

        try:
            importer = zipimport.zipimporter(location)
        except zipimport.ZipImportError:
            # Not a usable ZIP; nothing to yield.
            return
        for dist in find_eggs_in_zip(importer, location):
            yield legacy.Distribution(dist)

    def find_eggs(self, location: str) -> Iterator[BaseDistribution]:
        """Find eggs in a location.

        This actually uses the old *pkg_resources* backend. We likely want to
        deprecate this so we can eventually remove the *pkg_resources*
        dependency entirely. Before that, this should first emit a deprecation
        warning for some versions when using the fallback since importing
        *pkg_resources* is slow for those who don't need it.
        """
        if os.path.isdir(location):
            yield from self._find_eggs_in_dir(location)
        if zipfile.is_zipfile(location):
            yield from self._find_eggs_in_zip(location)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
@functools.lru_cache(maxsize=None)  # Warn a distribution exactly once.
def _emit_egg_deprecation(location: Optional[str]) -> None:
    """Emit pip's standard deprecation warning for loading a ``.egg``."""
    deprecated(
        reason=f"Loading egg at {location} is deprecated.",
        replacement="to use pip for package installation",
        gone_in="25.1",
        issue=12330,
    )
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
class Environment(BaseEnvironment):
    """An installed-package environment backed by importlib.metadata."""

    def __init__(self, paths: Sequence[str]) -> None:
        # Search paths (directories or ZIPs) scanned for distributions.
        self._paths = paths

    @classmethod
    def default(cls) -> BaseEnvironment:
        """Environment for the running interpreter's own search path."""
        return cls(sys.path)

    @classmethod
    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
        """Environment for explicit paths; ``None`` means ``sys.path``."""
        if paths is None:
            return cls(sys.path)
        return cls(paths)

    def _iter_distributions(self) -> Iterator[BaseDistribution]:
        # A single finder is shared across all paths so that its name
        # memoization enforces first-found-wins across the whole search.
        finder = _DistributionFinder()
        for location in self._paths:
            yield from finder.find(location)
            for dist in finder.find_eggs(location):
                _emit_egg_deprecation(dist.location)
                yield dist
            # This must go last because that's how pkg_resources tie-breaks.
            yield from finder.find_linked(location)

    def get_distribution(self, name: str) -> Optional[BaseDistribution]:
        """Return the installed distribution matching ``name``, or None."""
        canonical_name = canonicalize_name(name)
        matches = (
            distribution
            for distribution in self.iter_all_distributions()
            if distribution.canonical_name == canonical_name
        )
        return next(matches, None)
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc
ADDED
|
Binary file (24.1 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc
ADDED
|
Binary file (7.96 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc
ADDED
|
Binary file (13 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-311.pyc
ADDED
|
Binary file (3.27 kB). View file
|
|
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/network/auth.py
ADDED
|
@@ -0,0 +1,566 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Network Authentication Helpers
|
| 2 |
+
|
| 3 |
+
Contains interface (MultiDomainBasicAuth) and associated glue code for
|
| 4 |
+
providing credentials in the context of network requests.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import logging
|
| 8 |
+
import os
|
| 9 |
+
import shutil
|
| 10 |
+
import subprocess
|
| 11 |
+
import sysconfig
|
| 12 |
+
import typing
|
| 13 |
+
import urllib.parse
|
| 14 |
+
from abc import ABC, abstractmethod
|
| 15 |
+
from functools import lru_cache
|
| 16 |
+
from os.path import commonprefix
|
| 17 |
+
from pathlib import Path
|
| 18 |
+
from typing import Any, Dict, List, NamedTuple, Optional, Tuple
|
| 19 |
+
|
| 20 |
+
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
|
| 21 |
+
from pip._vendor.requests.models import Request, Response
|
| 22 |
+
from pip._vendor.requests.utils import get_netrc_auth
|
| 23 |
+
|
| 24 |
+
from pip._internal.utils.logging import getLogger
|
| 25 |
+
from pip._internal.utils.misc import (
|
| 26 |
+
ask,
|
| 27 |
+
ask_input,
|
| 28 |
+
ask_password,
|
| 29 |
+
remove_auth_from_url,
|
| 30 |
+
split_auth_netloc_from_url,
|
| 31 |
+
)
|
| 32 |
+
from pip._internal.vcs.versioncontrol import AuthInfo
|
| 33 |
+
|
| 34 |
+
logger = getLogger(__name__)
|
| 35 |
+
|
| 36 |
+
KEYRING_DISABLED = False
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class Credentials(NamedTuple):
    """A username/password pair together with the URL it authenticates."""

    # URL (index URL or netloc) these credentials apply to.
    url: str
    username: str
    password: str
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class KeyRingBaseProvider(ABC):
    """Keyring base provider interface"""

    # Whether this provider can actually reach a keyring backend.
    has_keyring: bool

    @abstractmethod
    def get_auth_info(
        self, url: str, username: Optional[str]
    ) -> Optional[AuthInfo]: ...

    @abstractmethod
    def save_auth_info(self, url: str, username: str, password: str) -> None: ...
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class KeyRingNullProvider(KeyRingBaseProvider):
    """Keyring null provider: used when no keyring backend is available.

    Every lookup returns nothing and every save is a no-op.
    """

    has_keyring = False

    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        return None

    def save_auth_info(self, url: str, username: str, password: str) -> None:
        return None
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class KeyRingPythonProvider(KeyRingBaseProvider):
    """Keyring interface which uses locally imported `keyring`"""

    has_keyring = True

    def __init__(self) -> None:
        # Raises ImportError when keyring is not installed; the caller
        # (get_keyring_provider) treats that as "try the next option".
        import keyring

        self.keyring = keyring

    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        """Return (username, password) for ``url``, or None if not stored."""
        # Support keyring's get_credential interface which supports getting
        # credentials without a username. This is only available for
        # keyring>=15.2.0.
        if hasattr(self.keyring, "get_credential"):
            logger.debug("Getting credentials from keyring for %s", url)
            cred = self.keyring.get_credential(url, username)
            if cred is not None:
                return cred.username, cred.password
            return None

        if username is not None:
            logger.debug("Getting password from keyring for %s", url)
            password = self.keyring.get_password(url, username)
            if password:
                return username, password
        return None

    def save_auth_info(self, url: str, username: str, password: str) -> None:
        self.keyring.set_password(url, username, password)
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class KeyRingCliProvider(KeyRingBaseProvider):
    """Provider which uses `keyring` cli

    Instead of calling the keyring package installed alongside pip
    we call keyring on the command line which will enable pip to
    use which ever installation of keyring is available first in
    PATH.
    """

    has_keyring = True

    def __init__(self, cmd: str) -> None:
        # Path to (or name of) the keyring executable.
        self.keyring = cmd

    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        # This is the default implementation of keyring.get_credential
        # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139
        if username is not None:
            password = self._get_password(url, username)
            if password is not None:
                return username, password
        return None

    def save_auth_info(self, url: str, username: str, password: str) -> None:
        return self._set_password(url, username, password)

    def _get_password(self, service_name: str, username: str) -> Optional[str]:
        """Mirror the implementation of keyring.get_password using cli"""
        if self.keyring is None:
            return None

        cmd = [self.keyring, "get", service_name, username]
        env = os.environ.copy()
        # Force UTF-8 so the child's stdout decodes predictably below.
        env["PYTHONIOENCODING"] = "utf-8"
        res = subprocess.run(
            cmd,
            stdin=subprocess.DEVNULL,
            stdout=subprocess.PIPE,
            env=env,
        )
        if res.returncode:
            # Non-zero exit means no stored password for this pair.
            return None
        return res.stdout.decode("utf-8").strip(os.linesep)

    def _set_password(self, service_name: str, username: str, password: str) -> None:
        """Mirror the implementation of keyring.set_password using cli"""
        if self.keyring is None:
            return None
        env = os.environ.copy()
        env["PYTHONIOENCODING"] = "utf-8"
        # The password is fed on stdin, terminated by a newline.
        subprocess.run(
            [self.keyring, "set", service_name, username],
            input=f"{password}{os.linesep}".encode(),
            env=env,
            check=True,
        )
        return None
|
| 160 |
+
|
| 161 |
+
|
| 162 |
+
@lru_cache(maxsize=None)
def get_keyring_provider(provider: str) -> KeyRingBaseProvider:
    """Resolve a provider name ("import"/"subprocess"/"auto"/"disabled")
    to a concrete KeyRingBaseProvider instance.

    The result is cached; MultiDomainBasicAuth clears the cache (and sets
    KEYRING_DISABLED) when a provider later fails at lookup time.
    """
    logger.verbose("Keyring provider requested: %s", provider)

    # keyring has previously failed and been disabled
    if KEYRING_DISABLED:
        provider = "disabled"
    if provider in ["import", "auto"]:
        try:
            impl = KeyRingPythonProvider()
            logger.verbose("Keyring provider set: import")
            return impl
        except ImportError:
            # keyring is simply not installed; fall through silently.
            pass
        except Exception as exc:
            # In the event of an unexpected exception
            # we should warn the user
            msg = "Installed copy of keyring fails with exception %s"
            if provider == "auto":
                msg = msg + ", trying to find a keyring executable as a fallback"
            logger.warning(msg, exc, exc_info=logger.isEnabledFor(logging.DEBUG))
    if provider in ["subprocess", "auto"]:
        cli = shutil.which("keyring")
        if cli and cli.startswith(sysconfig.get_path("scripts")):
            # The found executable lives in pip's own scripts dir, i.e. it is
            # the broken/unusable copy we just failed to import — search PATH
            # again with that directory removed.
            # all code within this function is stolen from shutil.which implementation
            @typing.no_type_check
            def PATH_as_shutil_which_determines_it() -> str:
                path = os.environ.get("PATH", None)
                if path is None:
                    try:
                        path = os.confstr("CS_PATH")
                    except (AttributeError, ValueError):
                        # os.confstr() or CS_PATH is not available
                        path = os.defpath
                # bpo-35755: Don't use os.defpath if the PATH environment variable is
                # set to an empty string

                return path

            scripts = Path(sysconfig.get_path("scripts"))

            paths = []
            for path in PATH_as_shutil_which_determines_it().split(os.pathsep):
                p = Path(path)
                try:
                    if not p.samefile(scripts):
                        paths.append(path)
                except FileNotFoundError:
                    # PATH entry doesn't exist; keep it out of the search.
                    pass

            path = os.pathsep.join(paths)

            cli = shutil.which("keyring", path=path)

        if cli:
            logger.verbose("Keyring provider set: subprocess with executable %s", cli)
            return KeyRingCliProvider(cli)

    logger.verbose("Keyring provider set: disabled")
    return KeyRingNullProvider()
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
class MultiDomainBasicAuth(AuthBase):
|
| 225 |
+
def __init__(
    self,
    prompting: bool = True,
    index_urls: Optional[List[str]] = None,
    keyring_provider: str = "auto",
) -> None:
    self.prompting = prompting
    self.index_urls = index_urls
    # Goes through the keyring_provider property setter, hence the ignore.
    self.keyring_provider = keyring_provider  # type: ignore[assignment]
    # netloc -> (username, password) cache of credentials that were used.
    self.passwords: Dict[str, AuthInfo] = {}
    # When the user is prompted to enter credentials and keyring is
    # available, we will offer to save them. If the user accepts,
    # this value is set to the credentials they entered. After the
    # request authenticates, the caller should call
    # ``save_credentials`` to save these.
    self._credentials_to_save: Optional[Credentials] = None
|
| 241 |
+
|
| 242 |
+
@property
def keyring_provider(self) -> KeyRingBaseProvider:
    # Resolved lazily from the stored provider *name*; get_keyring_provider
    # is lru_cache'd, so repeated reads are cheap.
    return get_keyring_provider(self._keyring_provider)
|
| 245 |
+
|
| 246 |
+
@keyring_provider.setter
def keyring_provider(self, provider: str) -> None:
    # Only the provider *name* is stored; resolution happens in the getter.
    # The free function get_keyring_provider has been decorated with
    # functools.cache. If an exception occurs in get_keyring_auth that
    # cache will be cleared and keyring disabled, take that into account
    # if you want to remove this indirection.
    self._keyring_provider = provider
|
| 253 |
+
|
| 254 |
+
@property
def use_keyring(self) -> bool:
    """Whether keyring lookups are allowed for this session."""
    # We won't use keyring when --no-input is passed unless
    # a specific provider is requested because it might require
    # user interaction
    return self.prompting or self._keyring_provider not in ["auto", "disabled"]
|
| 260 |
+
|
| 261 |
+
def _get_keyring_auth(
    self,
    url: Optional[str],
    username: Optional[str],
) -> Optional[AuthInfo]:
    """Return the tuple auth for a given url from keyring."""
    # Do nothing if no url was provided
    if not url:
        return None

    try:
        return self.keyring_provider.get_auth_info(url, username)
    except Exception as exc:
        # Log the full exception (with stacktrace) at debug, so it'll only
        # show up when running in verbose mode.
        logger.debug("Keyring is skipped due to an exception", exc_info=True)
        # Always log a shortened version of the exception.
        logger.warning(
            "Keyring is skipped due to an exception: %s",
            str(exc),
        )
        # Disable keyring for the rest of the run and drop the cached
        # provider so subsequent lookups resolve to the null provider.
        global KEYRING_DISABLED
        KEYRING_DISABLED = True
        get_keyring_provider.cache_clear()
        return None
|
| 286 |
+
|
| 287 |
+
def _get_index_url(self, url: str) -> Optional[str]:
    """Return the original index URL matching the requested URL.

    Cached or dynamically generated credentials may work against
    the original index URL rather than just the netloc.

    The provided url should have had its username and password
    removed already. If the original index url had credentials then
    they will be included in the return value.

    Returns None if no matching index was found, or if --no-index
    was specified by the user.
    """
    if not url or not self.index_urls:
        return None

    # Normalize with a trailing slash so prefix comparison is consistent.
    url = remove_auth_from_url(url).rstrip("/") + "/"
    parsed_url = urllib.parse.urlsplit(url)

    candidates = []

    for index in self.index_urls:
        index = index.rstrip("/") + "/"
        parsed_index = urllib.parse.urlsplit(remove_auth_from_url(index))
        if parsed_url == parsed_index:
            # Exact match wins immediately.
            return index

        if parsed_url.netloc != parsed_index.netloc:
            continue

        # Same host: keep (with credentials intact) for prefix ranking.
        candidate = urllib.parse.urlsplit(index)
        candidates.append(candidate)

    if not candidates:
        return None

    # Prefer the index whose path shares the longest directory prefix
    # (up to the last "/") with the requested URL.
    candidates.sort(
        reverse=True,
        key=lambda candidate: commonprefix(
            [
                parsed_url.path,
                candidate.path,
            ]
        ).rfind("/"),
    )

    return urllib.parse.urlunsplit(candidates[0])
|
| 334 |
+
|
| 335 |
+
def _get_new_credentials(
    self,
    original_url: str,
    *,
    allow_netrc: bool = True,
    allow_keyring: bool = False,
) -> AuthInfo:
    """Find and return credentials for the specified URL.

    Sources, in priority order: credentials embedded in the URL itself,
    credentials embedded in a matching configured index URL, ~/.netrc
    (when ``allow_netrc``), then keyring (when ``allow_keyring``).
    """
    # Split the credentials and netloc from the url.
    url, netloc, url_user_password = split_auth_netloc_from_url(
        original_url,
    )

    # Start with the credentials embedded in the url
    username, password = url_user_password
    if username is not None and password is not None:
        logger.debug("Found credentials in url for %s", netloc)
        return url_user_password

    # Find a matching index url for this request
    index_url = self._get_index_url(url)
    if index_url:
        # Split the credentials from the url.
        index_info = split_auth_netloc_from_url(index_url)
        if index_info:
            index_url, _, index_url_user_password = index_info
            logger.debug("Found index url %s", index_url)

    # If an index URL was found, try its embedded credentials
    if index_url and index_url_user_password[0] is not None:
        username, password = index_url_user_password
        if username is not None and password is not None:
            logger.debug("Found credentials in index url for %s", netloc)
            return index_url_user_password

    # Get creds from netrc if we still don't have them
    if allow_netrc:
        netrc_auth = get_netrc_auth(original_url)
        if netrc_auth:
            logger.debug("Found credentials in netrc for %s", netloc)
            return netrc_auth

    # If we don't have a password and keyring is available, use it.
    if allow_keyring:
        # The index url is more specific than the netloc, so try it first
        # fmt: off
        kr_auth = (
            self._get_keyring_auth(index_url, username) or
            self._get_keyring_auth(netloc, username)
        )
        # fmt: on
        if kr_auth:
            logger.debug("Found credentials in keyring for %s", netloc)
            return kr_auth

    # May be (None, None), or a partial pair from one of the sources above.
    return username, password
|
| 391 |
+
|
| 392 |
+
def _get_url_and_credentials(
    self, original_url: str
) -> Tuple[str, Optional[str], Optional[str]]:
    """Return the credentials to use for the provided URL.

    If allowed, netrc and keyring may be used to obtain the
    correct credentials.

    Returns (url_without_credentials, username, password). Note
    that even if the original URL contains credentials, this
    function may return a different username and password.
    """
    url, netloc, _ = split_auth_netloc_from_url(original_url)

    # Try to get credentials from original url
    username, password = self._get_new_credentials(original_url)

    # If credentials not found, use any stored credentials for this netloc.
    # Do this if either the username or the password is missing.
    # This accounts for the situation in which the user has specified
    # the username in the index url, but the password comes from keyring.
    if (username is None or password is None) and netloc in self.passwords:
        un, pw = self.passwords[netloc]
        # It is possible that the cached credentials are for a different username,
        # in which case the cache should be ignored.
        if username is None or username == un:
            username, password = un, pw

    if username is not None or password is not None:
        # Convert the username and password if they're None, so that
        # this netloc will show up as "cached" in the conditional above.
        # Further, HTTPBasicAuth doesn't accept None, so it makes sense to
        # cache the value that is going to be used.
        username = username or ""
        password = password or ""

        # Store any acquired credentials.
        self.passwords[netloc] = (username, password)

    # Invariant: credentials are either fully present or fully absent.
    assert (
        # Credentials were found
        (username is not None and password is not None)
        # Credentials were not found
        or (username is None and password is None)
    ), f"Could not load credentials from url: {original_url}"

    return url, username, password
|
| 439 |
+
|
| 440 |
+
def __call__(self, req: Request) -> Request:
    """Attach credentials and a 401-response hook to *req*, returning it."""
    # Resolve credentials and strip them out of the request URL.
    stripped_url, user, pw = self._get_url_and_credentials(req.url)
    req.url = stripped_url

    # Apply HTTP basic auth only when we hold a complete pair.
    if user is not None and pw is not None:
        req = HTTPBasicAuth(user, pw)(req)

    # React to authentication failures once the response arrives.
    req.register_hook("response", self.handle_401)
    return req
|
| 456 |
+
# Factored out to allow for easy patching in tests
|
| 457 |
+
def _prompt_for_password(
|
| 458 |
+
self, netloc: str
|
| 459 |
+
) -> Tuple[Optional[str], Optional[str], bool]:
|
| 460 |
+
username = ask_input(f"User for {netloc}: ") if self.prompting else None
|
| 461 |
+
if not username:
|
| 462 |
+
return None, None, False
|
| 463 |
+
if self.use_keyring:
|
| 464 |
+
auth = self._get_keyring_auth(netloc, username)
|
| 465 |
+
if auth and auth[0] is not None and auth[1] is not None:
|
| 466 |
+
return auth[0], auth[1], False
|
| 467 |
+
password = ask_password("Password: ")
|
| 468 |
+
return username, password, True
|
| 469 |
+
|
| 470 |
+
# Factored out to allow for easy patching in tests
|
| 471 |
+
def _should_save_password_to_keyring(self) -> bool:
|
| 472 |
+
if (
|
| 473 |
+
not self.prompting
|
| 474 |
+
or not self.use_keyring
|
| 475 |
+
or not self.keyring_provider.has_keyring
|
| 476 |
+
):
|
| 477 |
+
return False
|
| 478 |
+
return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
|
| 479 |
+
|
| 480 |
+
def handle_401(self, resp: Response, **kwargs: Any) -> Response:
    """Response hook: on a 401, obtain credentials and retry the request.

    Credentials come from keyring (when enabled) or from prompting the
    user; the failed response is consumed, its connection released, and
    the request re-sent with basic auth attached. Non-401 responses are
    returned untouched.
    """
    # We only care about 401 responses, anything else we want to just
    # pass through the actual response
    if resp.status_code != 401:
        return resp

    username, password = None, None

    # Query the keyring for credentials:
    if self.use_keyring:
        username, password = self._get_new_credentials(
            resp.url,
            allow_netrc=False,
            allow_keyring=True,
        )

    # We are not able to prompt the user so simply return the response
    if not self.prompting and not username and not password:
        return resp

    parsed = urllib.parse.urlparse(resp.url)

    # Prompt the user for a new username and password
    # `save` is True only when the password was typed interactively.
    save = False
    if not username and not password:
        username, password, save = self._prompt_for_password(parsed.netloc)

    # Store the new username and password to use for future requests
    self._credentials_to_save = None
    if username is not None and password is not None:
        self.passwords[parsed.netloc] = (username, password)

        # Prompt to save the password to keyring
        if save and self._should_save_password_to_keyring():
            self._credentials_to_save = Credentials(
                url=parsed.netloc,
                username=username,
                password=password,
            )

    # Consume content and release the original connection to allow our new
    # request to reuse the same one.
    # The result of the assignment isn't used, it's just needed to consume
    # the content.
    _ = resp.content
    resp.raw.release_conn()

    # Add our new username and password to the request
    req = HTTPBasicAuth(username or "", password or "")(resp.request)
    req.register_hook("response", self.warn_on_401)

    # On successful request, save the credentials that were used to
    # keyring. (Note that if the user responded "no" above, this member
    # is not set and nothing will be saved.)
    if self._credentials_to_save:
        req.register_hook("response", self.save_credentials)

    # Send our new request
    new_resp = resp.connection.send(req, **kwargs)
    new_resp.history.append(resp)

    return new_resp
|
| 543 |
+
def warn_on_401(self, resp: Response, **kwargs: Any) -> None:
    """Response callback: log a warning when credentials were rejected."""
    if resp.status_code != 401:
        return
    logger.warning(
        "401 Error, Credentials not correct for %s",
        resp.request.url,
    )
|
| 551 |
+
def save_credentials(self, resp: Response, **kwargs: Any) -> None:
    """Response callback: persist pending credentials after a success."""
    assert (
        self.keyring_provider.has_keyring
    ), "should never reach here without keyring"

    # Consume the pending credentials exactly once, even on failure paths.
    pending, self._credentials_to_save = self._credentials_to_save, None
    if not pending or resp.status_code >= 400:
        return
    try:
        logger.info("Saving credentials to keyring")
        self.keyring_provider.save_auth_info(
            pending.url, pending.username, pending.password
        )
    except Exception:
        # A broken keyring must never break the response flow.
        logger.exception("Failed to save credentials")
|
tuning-competition-baseline/.venv/lib/python3.11/site-packages/pip/_internal/network/cache.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""HTTP cache implementation.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import os
|
| 5 |
+
from contextlib import contextmanager
|
| 6 |
+
from datetime import datetime
|
| 7 |
+
from typing import BinaryIO, Generator, Optional, Union
|
| 8 |
+
|
| 9 |
+
from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache
|
| 10 |
+
from pip._vendor.cachecontrol.caches import SeparateBodyFileCache
|
| 11 |
+
from pip._vendor.requests.models import Response
|
| 12 |
+
|
| 13 |
+
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
|
| 14 |
+
from pip._internal.utils.misc import ensure_dir
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def is_from_cache(response: Response) -> bool:
    """Tell whether *response* was served from the local HTTP cache.

    A cached response carries a truthy ``from_cache`` attribute; a
    response fetched over the network does not have the attribute.
    """
    return getattr(response, "from_cache", False)
|
| 20 |
+
|
| 21 |
+
@contextmanager
def suppressed_cache_errors() -> Generator[None, None, None]:
    """Swallow any OSError raised by the wrapped block.

    If the cache directory cannot be read or written, we simply proceed
    as though caching were disabled rather than failing the request.
    """
    try:
        yield
    except OSError:
        # Cache trouble is non-fatal by design; drop the error.
        pass
|
| 31 |
+
|
| 32 |
+
class SafeFileCache(SeparateBodyBaseCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.

    There is a race condition when two processes try to write and/or read the
    same entry at the same time, since each entry consists of two separate
    files (https://github.com/psf/cachecontrol/issues/324). We therefore have
    additional logic that makes sure that both files to be present before
    returning an entry; this fixes the read side of the race condition.

    For the write side, we assume that the server will only ever return the
    same data for the same URL, which ought to be the case for files pip is
    downloading. PyPI does not have a mechanism to swap out a wheel for
    another wheel, for example. If this assumption is not true, the
    CacheControl issue will need to be fixed.
    """

    def __init__(self, directory: str) -> None:
        assert directory is not None, "Cache directory must not be None."
        super().__init__()
        # Root directory for all cache files; subdirectories are created
        # lazily in _write().
        self.directory = directory

    def _get_cache_path(self, name: str) -> str:
        """Map a cache key to its on-disk metadata path (body adds ".body")."""
        # From cachecontrol.caches.file_cache.FileCache._fn, brought into our
        # class for backwards-compatibility and to avoid using a non-public
        # method.
        hashed = SeparateBodyFileCache.encode(name)
        # Shard by the first five characters of the hash to keep any one
        # directory from holding every entry.
        parts = list(hashed[:5]) + [hashed]
        return os.path.join(self.directory, *parts)

    def get(self, key: str) -> Optional[bytes]:
        """Return the cached metadata for *key*, or None if absent/unreadable."""
        # The cache entry is only valid if both metadata and body exist.
        metadata_path = self._get_cache_path(key)
        body_path = metadata_path + ".body"
        if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
            return None
        # Falls through to an implicit None if the open/read raises OSError.
        with suppressed_cache_errors():
            with open(metadata_path, "rb") as f:
                return f.read()

    def _write(self, path: str, data: bytes) -> None:
        """Write *data* to *path* atomically, ignoring filesystem errors."""
        with suppressed_cache_errors():
            ensure_dir(os.path.dirname(path))

            # Write to an adjacent temp file first, then atomically replace,
            # so readers never observe a partially written entry.
            with adjacent_tmp_file(path) as f:
                f.write(data)

            replace(f.name, path)

    def set(
        self, key: str, value: bytes, expires: Union[int, datetime, None] = None
    ) -> None:
        """Store metadata for *key*; *expires* is accepted but unused here."""
        path = self._get_cache_path(key)
        self._write(path, value)

    def delete(self, key: str) -> None:
        """Remove both files of the entry, ignoring errors independently."""
        path = self._get_cache_path(key)
        with suppressed_cache_errors():
            os.remove(path)
        with suppressed_cache_errors():
            os.remove(path + ".body")

    def get_body(self, key: str) -> Optional[BinaryIO]:
        """Return an open binary stream over the cached body, or None.

        NOTE(review): the caller is responsible for closing the returned
        file object.
        """
        # The cache entry is only valid if both metadata and body exist.
        metadata_path = self._get_cache_path(key)
        body_path = metadata_path + ".body"
        if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
            return None
        with suppressed_cache_errors():
            return open(body_path, "rb")

    def set_body(self, key: str, body: bytes) -> None:
        """Store the response body for *key* next to its metadata file."""
        path = self._get_cache_path(key) + ".body"
        self._write(path, body)
|