Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__init__.py +27 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__pycache__/autocommand.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__pycache__/automain.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/autoasync.py +142 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/automain.py +59 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/autoparse.py +333 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/errors.py +23 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/INSTALLER +1 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA +85 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/REQUESTED +0 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL +5 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/top_level.txt +1 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.context-5.3.0.dist-info/LICENSE +17 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.context-5.3.0.dist-info/METADATA +75 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/RECORD +25 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/REQUESTED +0 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/WHEEL +4 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/__init__.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_checkers.cpython-310.pyc +0 -0
- videollama2/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_pytest_plugin.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/blake3/blake3.cpython-310-x86_64-linux-gnu.so +3 -0
- vllm/lib/python3.10/site-packages/et_xmlfile-2.0.0.dist-info/AUTHORS.txt +5 -0
- vllm/lib/python3.10/site-packages/et_xmlfile-2.0.0.dist-info/LICENCE.python +298 -0
- vllm/lib/python3.10/site-packages/et_xmlfile-2.0.0.dist-info/LICENCE.rst +23 -0
- vllm/lib/python3.10/site-packages/et_xmlfile-2.0.0.dist-info/RECORD +15 -0
- vllm/lib/python3.10/site-packages/et_xmlfile-2.0.0.dist-info/REQUESTED +0 -0
- vllm/lib/python3.10/site-packages/pip/__init__.py +13 -0
- vllm/lib/python3.10/site-packages/pip/__main__.py +24 -0
- vllm/lib/python3.10/site-packages/pip/__pip-runner__.py +50 -0
- vllm/lib/python3.10/site-packages/pip/_internal/__init__.py +18 -0
- vllm/lib/python3.10/site-packages/pip/_internal/build_env.py +322 -0
- vllm/lib/python3.10/site-packages/pip/_internal/cache.py +290 -0
- vllm/lib/python3.10/site-packages/pip/_internal/configuration.py +383 -0
- vllm/lib/python3.10/site-packages/pip/_internal/exceptions.py +809 -0
- vllm/lib/python3.10/site-packages/pip/_internal/main.py +12 -0
- vllm/lib/python3.10/site-packages/pip/_internal/pyproject.py +185 -0
- vllm/lib/python3.10/site-packages/pip/_internal/req/__init__.py +90 -0
- vllm/lib/python3.10/site-packages/pip/_internal/req/__pycache__/__init__.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pip/_internal/req/__pycache__/constructors.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_file.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_install.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_set.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-310.pyc +0 -0
- vllm/lib/python3.10/site-packages/pip/_internal/req/constructors.py +560 -0
- vllm/lib/python3.10/site-packages/pip/_internal/req/req_file.py +623 -0
- vllm/lib/python3.10/site-packages/pip/_internal/req/req_install.py +934 -0
- vllm/lib/python3.10/site-packages/pip/_internal/req/req_set.py +82 -0
- vllm/lib/python3.10/site-packages/pip/_internal/req/req_uninstall.py +633 -0
- vllm/lib/python3.10/site-packages/pip/_internal/resolution/__init__.py +0 -0
.gitattributes
CHANGED
|
@@ -917,3 +917,4 @@ videochat2/lib/python3.10/site-packages/torch/fx/experimental/__pycache__/symbol
|
|
| 917 |
parrot/lib/python3.10/site-packages/scipy/interpolate/_bspl.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 918 |
videollama2/lib/python3.10/site-packages/sklearn/linear_model/_sgd_fast.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 919 |
videollama2/lib/python3.10/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 917 |
parrot/lib/python3.10/site-packages/scipy/interpolate/_bspl.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 918 |
videollama2/lib/python3.10/site-packages/sklearn/linear_model/_sgd_fast.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 919 |
videollama2/lib/python3.10/site-packages/setuptools/_vendor/__pycache__/typing_extensions.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 920 |
+
vllm/lib/python3.10/site-packages/blake3/blake3.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__init__.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2014-2016 Nathan West
|
| 2 |
+
#
|
| 3 |
+
# This file is part of autocommand.
|
| 4 |
+
#
|
| 5 |
+
# autocommand is free software: you can redistribute it and/or modify
|
| 6 |
+
# it under the terms of the GNU Lesser General Public License as published by
|
| 7 |
+
# the Free Software Foundation, either version 3 of the License, or
|
| 8 |
+
# (at your option) any later version.
|
| 9 |
+
#
|
| 10 |
+
# autocommand is distributed in the hope that it will be useful,
|
| 11 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
| 12 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
| 13 |
+
# GNU Lesser General Public License for more details.
|
| 14 |
+
#
|
| 15 |
+
# You should have received a copy of the GNU Lesser General Public License
|
| 16 |
+
# along with autocommand. If not, see <http://www.gnu.org/licenses/>.
|
| 17 |
+
|
| 18 |
+
# flake8 flags all these imports as unused, hence the NOQAs everywhere.
|
| 19 |
+
|
| 20 |
+
from .automain import automain # NOQA
|
| 21 |
+
from .autoparse import autoparse, smart_open # NOQA
|
| 22 |
+
from .autocommand import autocommand # NOQA
|
| 23 |
+
|
| 24 |
+
try:
|
| 25 |
+
from .autoasync import autoasync # NOQA
|
| 26 |
+
except ImportError: # pragma: no cover
|
| 27 |
+
pass
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__pycache__/autocommand.cpython-310.pyc
ADDED
|
Binary file (1 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/__pycache__/automain.cpython-310.pyc
ADDED
|
Binary file (1.64 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/autoasync.py
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2014-2015 Nathan West
|
| 2 |
+
#
|
| 3 |
+
# This file is part of autocommand.
|
| 4 |
+
#
|
| 5 |
+
# autocommand is free software: you can redistribute it and/or modify
|
| 6 |
+
# it under the terms of the GNU Lesser General Public License as published by
|
| 7 |
+
# the Free Software Foundation, either version 3 of the License, or
|
| 8 |
+
# (at your option) any later version.
|
| 9 |
+
#
|
| 10 |
+
# autocommand is distributed in the hope that it will be useful,
|
| 11 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
| 12 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
| 13 |
+
# GNU Lesser General Public License for more details.
|
| 14 |
+
#
|
| 15 |
+
# You should have received a copy of the GNU Lesser General Public License
|
| 16 |
+
# along with autocommand. If not, see <http://www.gnu.org/licenses/>.
|
| 17 |
+
|
| 18 |
+
from asyncio import get_event_loop, iscoroutine
|
| 19 |
+
from functools import wraps
|
| 20 |
+
from inspect import signature
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
async def _run_forever_coro(coro, args, kwargs, loop):
    '''
    Launch a main function that was decorated with forever=True. The target
    may be:

    - a plain function that does its own event-loop setup
    - a coroutine function that must be scheduled on the loop
    - a plain function wrapping a coroutine function

    It is therefore invoked unconditionally, and the result is awaited only
    when it turns out to be a coroutine object.

    This lives in its own function so that everything it creates is garbage
    collected as soon as possible, which in turn surfaces any exceptions
    raised inside the scheduled tasks promptly.
    '''
    # Personal note: I consider this an antipattern, as it relies on the use
    # of unowned resources. The setup function dumps some stuff into the event
    # loop where it just whirls in the ether without a well defined owner or
    # lifetime. For this reason, there's a good chance the forever=True
    # feature will be removed from autoasync at some point in the future.
    result = coro(*args, **kwargs)
    if iscoroutine(result):
        await result
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def autoasync(coro=None, *, loop=None, forever=False, pass_loop=False):
    '''
    Wrap an asyncio coroutine function in a plain function that, when called,
    runs the coroutine to completion on an event loop and returns its result.
    Intended for writing synchronous entry points around async code, which
    would otherwise need an explicit run_until_complete.

    If `loop` is given, the coroutine runs on that loop. Otherwise the loop
    is looked up with asyncio.get_event_loop — deferred until call time, so
    event-loop policies installed after decoration are still honored.

    If `forever` is True, the loop keeps running after the decorated
    coroutine finishes (useful for servers created with asyncio.start_server
    and the like).

    If `pass_loop` is True, the event loop object is injected as the `loop`
    argument of the coroutine at call time, and the wrapper's __signature__
    is rewritten without that parameter so autoparse won't generate a CLI
    option for it.

    Usable both bare ( @autoasync ) and with arguments ( @autoasync(...) ).
    '''
    # Bare-decorator spelling: defer until we actually receive the function.
    if coro is None:
        return lambda wrapped: autoasync(
            wrapped, loop=loop,
            forever=forever,
            pass_loop=pass_loop)

    # Keep both the full and the loop-less signatures: binding through them
    # is the only way to inject `loop` correctly in 100% of cases, even when
    # it was declared as a positional parameter.
    # NOTE: A future release will probably require the loop parameter to be
    # a kwonly parameter.
    if pass_loop:
        full_sig = signature(coro)
        public_sig = full_sig.replace(parameters=(
            value for key, value in full_sig.parameters.items()
            if key != "loop"))

    @wraps(coro)
    def autoasync_wrapper(*args, **kwargs):
        # Resolve the loop lazily, at call time rather than decoration time.
        if loop is None:
            active_loop = get_event_loop()
        else:
            active_loop = loop

        if pass_loop:
            # Rebind the caller's arguments through the trimmed signature,
            # then splice the loop in at its declared position (positional,
            # keyword, etc).
            binding = full_sig.bind_partial()
            binding.arguments.update(
                loop=active_loop,
                **public_sig.bind(*args, **kwargs).arguments)
            args, kwargs = binding.args, binding.kwargs

        if forever:
            active_loop.create_task(_run_forever_coro(
                coro, args, kwargs, active_loop
            ))
            active_loop.run_forever()
        else:
            return active_loop.run_until_complete(coro(*args, **kwargs))

    # Advertise the loop-less signature so 'pass_loop' composes with
    # autoparse.
    if pass_loop:
        autoasync_wrapper.__signature__ = public_sig

    return autoasync_wrapper
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/automain.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2014-2015 Nathan West
|
| 2 |
+
#
|
| 3 |
+
# This file is part of autocommand.
|
| 4 |
+
#
|
| 5 |
+
# autocommand is free software: you can redistribute it and/or modify
|
| 6 |
+
# it under the terms of the GNU Lesser General Public License as published by
|
| 7 |
+
# the Free Software Foundation, either version 3 of the License, or
|
| 8 |
+
# (at your option) any later version.
|
| 9 |
+
#
|
| 10 |
+
# autocommand is distributed in the hope that it will be useful,
|
| 11 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
| 12 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
| 13 |
+
# GNU Lesser General Public License for more details.
|
| 14 |
+
#
|
| 15 |
+
# You should have received a copy of the GNU Lesser General Public License
|
| 16 |
+
# along with autocommand. If not, see <http://www.gnu.org/licenses/>.
|
| 17 |
+
|
| 18 |
+
import sys
|
| 19 |
+
from .errors import AutocommandError
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class AutomainRequiresModuleError(AutocommandError, TypeError):
    '''
    Raised when @automain is applied directly to a function instead of being
    called with a module name first (e.g. ``@automain(__name__)``).
    '''
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def automain(module, *, args=(), kwargs=None):
    '''
    Decorator that invokes a function when its module is executed as a
    script. Pass the module's ``__name__``; if it equals "__main__" (or you
    pass ``True`` to force it), the decorated function is called with the
    optional `args`/`kwargs` and the program exits via ``sys.exit`` with its
    return value. Otherwise the function is returned unchanged.

    Usage:

        @automain(__name__)  # Pass __name__ to check __name__=="__main__"
        def main():
            ...
    '''
    # Guard against the @automain spelling (no call): we were handed the
    # function itself rather than a module name.
    if callable(module):
        raise AutomainRequiresModuleError(module)

    if module != '__main__' and module is not True:
        # Not running as a script: hand the function back untouched.
        return lambda main: main

    call_kwargs = {} if kwargs is None else kwargs

    # A def rather than a lambda, for a neater traceback.
    def automain_decorator(main):
        sys.exit(main(*args, **call_kwargs))

    return automain_decorator
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/autoparse.py
ADDED
|
@@ -0,0 +1,333 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2014-2015 Nathan West
|
| 2 |
+
#
|
| 3 |
+
# This file is part of autocommand.
|
| 4 |
+
#
|
| 5 |
+
# autocommand is free software: you can redistribute it and/or modify
|
| 6 |
+
# it under the terms of the GNU Lesser General Public License as published by
|
| 7 |
+
# the Free Software Foundation, either version 3 of the License, or
|
| 8 |
+
# (at your option) any later version.
|
| 9 |
+
#
|
| 10 |
+
# autocommand is distributed in the hope that it will be useful,
|
| 11 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
| 12 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
| 13 |
+
# GNU Lesser General Public License for more details.
|
| 14 |
+
#
|
| 15 |
+
# You should have received a copy of the GNU Lesser General Public License
|
| 16 |
+
# along with autocommand. If not, see <http://www.gnu.org/licenses/>.
|
| 17 |
+
|
| 18 |
+
import sys
|
| 19 |
+
from re import compile as compile_regex
|
| 20 |
+
from inspect import signature, getdoc, Parameter
|
| 21 |
+
from argparse import ArgumentParser
|
| 22 |
+
from contextlib import contextmanager
|
| 23 |
+
from functools import wraps
|
| 24 |
+
from io import IOBase
|
| 25 |
+
from autocommand.errors import AutocommandError
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
# Sentinel for "no annotation / no default" from the inspect module.
_empty = Parameter.empty


class AnnotationError(AutocommandError):
    '''Annotation error: annotation must be a string, type, or tuple of both'''


class PositionalArgError(AutocommandError):
    '''
    Positional Arg Error: autocommand can't handle positional-only parameters
    '''


class KWArgError(AutocommandError):
    '''kwarg Error: autocommand can't handle a **kwargs parameter'''


class DocstringError(AutocommandError):
    '''Docstring error'''


class TooManySplitsError(DocstringError):
    '''
    The docstring had too many ---- section splits. Currently we only support
    using up to a single split, to split the docstring into description and
    epilog parts.
    '''
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def _get_type_description(annotation):
    '''
    Decompose a parameter annotation into a (type, description) pair.

    Accepted forms: a callable (the type), a str (the help text), or a
    2-tuple containing one of each in either order. Anything else raises
    AnnotationError. If you provide an annotation that is somehow both a
    string and a callable, the behavior is undefined.
    '''
    if annotation is _empty:
        return None, None

    if callable(annotation):
        return annotation, None

    if isinstance(annotation, str):
        return None, annotation

    if isinstance(annotation, tuple):
        try:
            first, second = annotation
        except ValueError as unpack_error:
            # Wrong tuple length — not a 2-tuple.
            raise AnnotationError(annotation) from unpack_error
        if callable(first) and isinstance(second, str):
            return first, second
        if isinstance(first, str) and callable(second):
            return second, first

    raise AnnotationError(annotation)
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def _add_arguments(param, parser, used_char_args, add_nos):
    '''
    Add the argument(s) to an ArgumentParser (using add_argument) for a given
    parameter. used_char_args is the set of -short options currently already in
    use, and is updated (if necessary) by this function. If add_nos is True,
    this will also add an inverse switch for all boolean options. For
    instance, for the boolean parameter "verbose", this will create --verbose
    and --no-verbose.
    '''

    # Impl note: This function is kept separate from make_parser because it's
    # already very long and I wanted to separate out as much as possible into
    # its own call scope, to prevent even the possibility of subtle mutation
    # bugs.
    if param.kind is param.POSITIONAL_ONLY:
        raise PositionalArgError(param)
    elif param.kind is param.VAR_KEYWORD:
        raise KWArgError(param)

    # These are the kwargs for the add_argument function.
    arg_spec = {}
    is_option = False

    # Get the type and description from the annotation (either may be None).
    arg_type, description = _get_type_description(param.annotation)

    # Get the default value
    default = param.default

    # If there is no explicit type, and the default is present and not None,
    # infer the type from the default.
    if arg_type is None and default not in {_empty, None}:
        arg_type = type(default)

    # Add default. The presence of a default means this is an option, not an
    # argument.
    if default is not _empty:
        arg_spec['default'] = default
        is_option = True

    # Add the type
    if arg_type is not None:
        # Special case for bool: make it just a --switch. A falsy or missing
        # default means the switch turns the flag on; a truthy default means
        # it turns the flag off.
        if arg_type is bool:
            if not default or default is _empty:
                arg_spec['action'] = 'store_true'
            else:
                arg_spec['action'] = 'store_false'

            # Switches are always options
            is_option = True

        # Special case for file types: make it a string type, for filename
        elif isinstance(default, IOBase):
            arg_spec['type'] = str

        # TODO: special case for list type.
        #   - How to specify type of list members?
        #     - param: [int]
        #     - param: int =[]
        #   - action='append' vs nargs='*'

        else:
            arg_spec['type'] = arg_type

    # nargs: if the signature includes *args, collect them as trailing CLI
    # arguments in a list. *args can't have a default value, so it can never be
    # an option.
    if param.kind is param.VAR_POSITIONAL:
        # TODO: consider depluralizing metavar/name here.
        arg_spec['nargs'] = '*'

    # Add description.
    if description is not None:
        arg_spec['help'] = description

    # Get the --flags
    flags = []
    name = param.name

    if is_option:
        # Add the first letter as a -short option. Try the letter and its
        # swapped-case form before giving up on a short flag.
        for letter in name[0], name[0].swapcase():
            if letter not in used_char_args:
                used_char_args.add(letter)
                flags.append('-{}'.format(letter))
                break

        # If the parameter is a --long option, or is a -short option that
        # somehow failed to get a flag, add it.
        if len(name) > 1 or not flags:
            flags.append('--{}'.format(name))

        arg_spec['dest'] = name
    else:
        flags.append(name)

    parser.add_argument(*flags, **arg_spec)

    # Create the --no- version for boolean switches, restoring the original
    # default (or False if there was none).
    if add_nos and arg_type is bool:
        parser.add_argument(
            '--no-{}'.format(name),
            action='store_const',
            dest=name,
            const=default if default is not _empty else False)
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def make_parser(func_sig, description, epilog, add_nos):
    '''
    Build an ArgumentParser from a function signature, adding one argument
    or option per parameter via _add_arguments.
    '''
    parser = ArgumentParser(description=description, epilog=epilog)

    # 'h' is taken by argparse's built-in help flag.
    used_char_args = {'h'}

    # Put single-character parameters first so they are not forced into
    # --long spellings. sorted() is stable, so the parameters otherwise keep
    # their relative order.
    ordered_params = sorted(
        func_sig.parameters.values(),
        key=lambda p: len(p.name) > 1)

    for parameter in ordered_params:
        _add_arguments(parameter, parser, used_char_args, add_nos)

    return parser
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
# A separator line of four or more hyphens (with optional surrounding
# whitespace) splits a docstring into description and epilog sections.
_DOCSTRING_SPLIT = compile_regex(r'\n\s*-{4,}\s*\n')


def parse_docstring(docstring):
    '''
    Split a docstring into a (description, epilog) pair at a ---- separator
    line. A missing docstring yields ('', ''); a docstring with no separator
    becomes the description with an empty epilog. More than one separator
    raises TooManySplitsError.
    '''
    if docstring is None:
        return '', ''

    sections = _DOCSTRING_SPLIT.split(docstring)

    if len(sections) > 2:
        raise TooManySplitsError()
    if len(sections) == 2:
        return sections[0], sections[1]
    return docstring, ''
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
def autoparse(
        func=None, *,
        description=None,
        epilog=None,
        add_nos=False,
        parser=None):
    '''
    Convert a function taking normal arguments into one taking a single
    optional `argv` list, which is parsed with an argparse.ArgumentParser
    before the underlying function is called with the parsed values. When
    `argv` is omitted, sys.argv[1:] is used — evaluated at call time, so
    tests can inject their own arguments. This makes the result usable both
    as a setuptools entry point and as a normal main function.

    The ArgumentParser is derived from the function's signature: parameters
    without defaults become positional arguments, parameters with defaults
    become --options, and annotations supply types and/or help text.

    `description` and `epilog` correspond to the argparse parameters of the
    same names; when absent they default to the respective sections of the
    decorated function's docstring.

    If add_nos is True, every boolean option also gets an inverting --no-
    counterpart; the one appearing last on the command line wins.

    If a `parser` is supplied it is used as-is instead of being generated;
    its result names must match the function's parameter names.

    The wrapped function exposes the original as `.func` and the parser as
    `.parser`. Usable with ( @autoparse(...) ) or without ( @autoparse )
    arguments.
    '''
    # @autoparse(...) spelling: wait for the function.
    if func is None:
        return lambda wrapped: autoparse(
            wrapped, description=description,
            epilog=epilog,
            add_nos=add_nos,
            parser=parser)

    func_sig = signature(func)

    docstr_description, docstr_epilog = parse_docstring(getdoc(func))

    if parser is None:
        parser = make_parser(
            func_sig,
            description or docstr_description,
            epilog or docstr_epilog,
            add_nos)

    @wraps(func)
    def autoparse_wrapper(argv=None):
        if argv is None:
            argv = sys.argv[1:]

        # An empty binding does the heavy lifting of turning the parser's
        # named results back into correctly bound *args and **kwargs.
        bound = func_sig.bind_partial()
        bound.arguments.update(vars(parser.parse_args(argv)))

        return func(*bound.args, **bound.kwargs)

    # TODO: attach an updated __signature__ to autoparse_wrapper, just in
    # case.

    # Attach the wrapped function and parser, and return the wrapper.
    autoparse_wrapper.func = func
    autoparse_wrapper.parser = parser
    return autoparse_wrapper
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
@contextmanager
|
| 312 |
+
def smart_open(filename_or_file, *args, **kwargs):
|
| 313 |
+
'''
|
| 314 |
+
This context manager allows you to open a filename, if you want to default
|
| 315 |
+
some already-existing file object, like sys.stdout, which shouldn't be
|
| 316 |
+
closed at the end of the context. If the filename argument is a str, bytes,
|
| 317 |
+
or int, the file object is created via a call to open with the given *args
|
| 318 |
+
and **kwargs, sent to the context, and closed at the end of the context,
|
| 319 |
+
just like "with open(filename) as f:". If it isn't one of the openable
|
| 320 |
+
types, the object simply sent to the context unchanged, and left unclosed
|
| 321 |
+
at the end of the context. Example:
|
| 322 |
+
|
| 323 |
+
def work_with_file(name=sys.stdout):
|
| 324 |
+
with smart_open(name) as f:
|
| 325 |
+
# Works correctly if name is a str filename or sys.stdout
|
| 326 |
+
print("Some stuff", file=f)
|
| 327 |
+
# If it was a filename, f is closed at the end here.
|
| 328 |
+
'''
|
| 329 |
+
if isinstance(filename_or_file, (str, bytes, int)):
|
| 330 |
+
with open(filename_or_file, *args, **kwargs) as file:
|
| 331 |
+
yield file
|
| 332 |
+
else:
|
| 333 |
+
yield filename_or_file
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/autocommand/errors.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2014-2016 Nathan West
|
| 2 |
+
#
|
| 3 |
+
# This file is part of autocommand.
|
| 4 |
+
#
|
| 5 |
+
# autocommand is free software: you can redistribute it and/or modify
|
| 6 |
+
# it under the terms of the GNU Lesser General Public License as published by
|
| 7 |
+
# the Free Software Foundation, either version 3 of the License, or
|
| 8 |
+
# (at your option) any later version.
|
| 9 |
+
#
|
| 10 |
+
# autocommand is distributed in the hope that it will be useful,
|
| 11 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
| 12 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
| 13 |
+
# GNU Lesser General Public License for more details.
|
| 14 |
+
#
|
| 15 |
+
# You should have received a copy of the GNU Lesser General Public License
|
| 16 |
+
# along with autocommand. If not, see <http://www.gnu.org/licenses/>.
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class AutocommandError(Exception):
|
| 20 |
+
'''Base class for autocommand exceptions'''
|
| 21 |
+
pass
|
| 22 |
+
|
| 23 |
+
# Individual modules will define errors specific to that module.
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
conda
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: jaraco.collections
|
| 3 |
+
Version: 5.1.0
|
| 4 |
+
Summary: Collection objects similar to those in stdlib by jaraco
|
| 5 |
+
Author-email: "Jason R. Coombs" <jaraco@jaraco.com>
|
| 6 |
+
Project-URL: Source, https://github.com/jaraco/jaraco.collections
|
| 7 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 8 |
+
Classifier: Intended Audience :: Developers
|
| 9 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 10 |
+
Classifier: Programming Language :: Python :: 3
|
| 11 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 12 |
+
Requires-Python: >=3.8
|
| 13 |
+
Description-Content-Type: text/x-rst
|
| 14 |
+
License-File: LICENSE
|
| 15 |
+
Requires-Dist: jaraco.text
|
| 16 |
+
Provides-Extra: check
|
| 17 |
+
Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'check'
|
| 18 |
+
Requires-Dist: pytest-ruff >=0.2.1 ; (sys_platform != "cygwin") and extra == 'check'
|
| 19 |
+
Provides-Extra: cover
|
| 20 |
+
Requires-Dist: pytest-cov ; extra == 'cover'
|
| 21 |
+
Provides-Extra: doc
|
| 22 |
+
Requires-Dist: sphinx >=3.5 ; extra == 'doc'
|
| 23 |
+
Requires-Dist: jaraco.packaging >=9.3 ; extra == 'doc'
|
| 24 |
+
Requires-Dist: rst.linker >=1.9 ; extra == 'doc'
|
| 25 |
+
Requires-Dist: furo ; extra == 'doc'
|
| 26 |
+
Requires-Dist: sphinx-lint ; extra == 'doc'
|
| 27 |
+
Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'doc'
|
| 28 |
+
Provides-Extra: enabler
|
| 29 |
+
Requires-Dist: pytest-enabler >=2.2 ; extra == 'enabler'
|
| 30 |
+
Provides-Extra: test
|
| 31 |
+
Requires-Dist: pytest !=8.1.*,>=6 ; extra == 'test'
|
| 32 |
+
Provides-Extra: type
|
| 33 |
+
Requires-Dist: pytest-mypy ; extra == 'type'
|
| 34 |
+
|
| 35 |
+
.. image:: https://img.shields.io/pypi/v/jaraco.collections.svg
|
| 36 |
+
:target: https://pypi.org/project/jaraco.collections
|
| 37 |
+
|
| 38 |
+
.. image:: https://img.shields.io/pypi/pyversions/jaraco.collections.svg
|
| 39 |
+
|
| 40 |
+
.. image:: https://github.com/jaraco/jaraco.collections/actions/workflows/main.yml/badge.svg
|
| 41 |
+
:target: https://github.com/jaraco/jaraco.collections/actions?query=workflow%3A%22tests%22
|
| 42 |
+
:alt: tests
|
| 43 |
+
|
| 44 |
+
.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
|
| 45 |
+
:target: https://github.com/astral-sh/ruff
|
| 46 |
+
:alt: Ruff
|
| 47 |
+
|
| 48 |
+
.. image:: https://readthedocs.org/projects/jaracocollections/badge/?version=latest
|
| 49 |
+
:target: https://jaracocollections.readthedocs.io/en/latest/?badge=latest
|
| 50 |
+
|
| 51 |
+
.. image:: https://img.shields.io/badge/skeleton-2024-informational
|
| 52 |
+
:target: https://blog.jaraco.com/skeleton
|
| 53 |
+
|
| 54 |
+
.. image:: https://tidelift.com/badges/package/pypi/jaraco.collections
|
| 55 |
+
:target: https://tidelift.com/subscription/pkg/pypi-jaraco.collections?utm_source=pypi-jaraco.collections&utm_medium=readme
|
| 56 |
+
|
| 57 |
+
Models and classes to supplement the stdlib 'collections' module.
|
| 58 |
+
|
| 59 |
+
See the docs, linked above, for descriptions and usage examples.
|
| 60 |
+
|
| 61 |
+
Highlights include:
|
| 62 |
+
|
| 63 |
+
- RangeMap: A mapping that accepts a range of values for keys.
|
| 64 |
+
- Projection: A subset over an existing mapping.
|
| 65 |
+
- KeyTransformingDict: Generalized mapping with keys transformed by a function.
|
| 66 |
+
- FoldedCaseKeyedDict: A dict whose string keys are case-insensitive.
|
| 67 |
+
- BijectiveMap: A map where keys map to values and values back to their keys.
|
| 68 |
+
- ItemsAsAttributes: A mapping mix-in exposing items as attributes.
|
| 69 |
+
- IdentityOverrideMap: A map whose keys map by default to themselves unless overridden.
|
| 70 |
+
- FrozenDict: A hashable, immutable map.
|
| 71 |
+
- Enumeration: An object whose keys are enumerated.
|
| 72 |
+
- Everything: A container that contains all things.
|
| 73 |
+
- Least, Greatest: Objects that are always less than or greater than any other.
|
| 74 |
+
- pop_all: Return all items from the mutable sequence and remove them from that sequence.
|
| 75 |
+
- DictStack: A stack of dicts, great for sharing scopes.
|
| 76 |
+
- WeightedLookup: A specialized RangeMap for selecting an item by weights.
|
| 77 |
+
|
| 78 |
+
For Enterprise
|
| 79 |
+
==============
|
| 80 |
+
|
| 81 |
+
Available as part of the Tidelift Subscription.
|
| 82 |
+
|
| 83 |
+
This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
|
| 84 |
+
|
| 85 |
+
`Learn more <https://tidelift.com/subscription/pkg/pypi-jaraco.collections?utm_source=pypi-jaraco.collections&utm_medium=referral&utm_campaign=github>`_.
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/REQUESTED
ADDED
|
File without changes
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (73.0.1)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.collections-5.1.0.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
jaraco
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.context-5.3.0.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 2 |
+
of this software and associated documentation files (the "Software"), to
|
| 3 |
+
deal in the Software without restriction, including without limitation the
|
| 4 |
+
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
| 5 |
+
sell copies of the Software, and to permit persons to whom the Software is
|
| 6 |
+
furnished to do so, subject to the following conditions:
|
| 7 |
+
|
| 8 |
+
The above copyright notice and this permission notice shall be included in
|
| 9 |
+
all copies or substantial portions of the Software.
|
| 10 |
+
|
| 11 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 12 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 13 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 14 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 15 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
| 16 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
|
| 17 |
+
IN THE SOFTWARE.
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/jaraco.context-5.3.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: jaraco.context
|
| 3 |
+
Version: 5.3.0
|
| 4 |
+
Summary: Useful decorators and context managers
|
| 5 |
+
Home-page: https://github.com/jaraco/jaraco.context
|
| 6 |
+
Author: Jason R. Coombs
|
| 7 |
+
Author-email: jaraco@jaraco.com
|
| 8 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 9 |
+
Classifier: Intended Audience :: Developers
|
| 10 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 11 |
+
Classifier: Programming Language :: Python :: 3
|
| 12 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 13 |
+
Requires-Python: >=3.8
|
| 14 |
+
License-File: LICENSE
|
| 15 |
+
Requires-Dist: backports.tarfile ; python_version < "3.12"
|
| 16 |
+
Provides-Extra: docs
|
| 17 |
+
Requires-Dist: sphinx >=3.5 ; extra == 'docs'
|
| 18 |
+
Requires-Dist: jaraco.packaging >=9.3 ; extra == 'docs'
|
| 19 |
+
Requires-Dist: rst.linker >=1.9 ; extra == 'docs'
|
| 20 |
+
Requires-Dist: furo ; extra == 'docs'
|
| 21 |
+
Requires-Dist: sphinx-lint ; extra == 'docs'
|
| 22 |
+
Requires-Dist: jaraco.tidelift >=1.4 ; extra == 'docs'
|
| 23 |
+
Provides-Extra: testing
|
| 24 |
+
Requires-Dist: pytest !=8.1.1,>=6 ; extra == 'testing'
|
| 25 |
+
Requires-Dist: pytest-checkdocs >=2.4 ; extra == 'testing'
|
| 26 |
+
Requires-Dist: pytest-cov ; extra == 'testing'
|
| 27 |
+
Requires-Dist: pytest-mypy ; extra == 'testing'
|
| 28 |
+
Requires-Dist: pytest-enabler >=2.2 ; extra == 'testing'
|
| 29 |
+
Requires-Dist: pytest-ruff >=0.2.1 ; extra == 'testing'
|
| 30 |
+
Requires-Dist: portend ; extra == 'testing'
|
| 31 |
+
|
| 32 |
+
.. image:: https://img.shields.io/pypi/v/jaraco.context.svg
|
| 33 |
+
:target: https://pypi.org/project/jaraco.context
|
| 34 |
+
|
| 35 |
+
.. image:: https://img.shields.io/pypi/pyversions/jaraco.context.svg
|
| 36 |
+
|
| 37 |
+
.. image:: https://github.com/jaraco/jaraco.context/actions/workflows/main.yml/badge.svg
|
| 38 |
+
:target: https://github.com/jaraco/jaraco.context/actions?query=workflow%3A%22tests%22
|
| 39 |
+
:alt: tests
|
| 40 |
+
|
| 41 |
+
.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
|
| 42 |
+
:target: https://github.com/astral-sh/ruff
|
| 43 |
+
:alt: Ruff
|
| 44 |
+
|
| 45 |
+
.. image:: https://readthedocs.org/projects/jaracocontext/badge/?version=latest
|
| 46 |
+
:target: https://jaracocontext.readthedocs.io/en/latest/?badge=latest
|
| 47 |
+
|
| 48 |
+
.. image:: https://img.shields.io/badge/skeleton-2024-informational
|
| 49 |
+
:target: https://blog.jaraco.com/skeleton
|
| 50 |
+
|
| 51 |
+
.. image:: https://tidelift.com/badges/package/pypi/jaraco.context
|
| 52 |
+
:target: https://tidelift.com/subscription/pkg/pypi-jaraco.context?utm_source=pypi-jaraco.context&utm_medium=readme
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
Highlights
|
| 56 |
+
==========
|
| 57 |
+
|
| 58 |
+
See the docs linked from the badge above for the full details, but here are some features that may be of interest.
|
| 59 |
+
|
| 60 |
+
- ``ExceptionTrap`` provides a general-purpose wrapper for trapping exceptions and then acting on the outcome. Includes ``passes`` and ``raises`` decorators to replace the result of a wrapped function by a boolean indicating the outcome of the exception trap. See `this keyring commit <https://github.com/jaraco/keyring/commit/a85a7cbc6c909f8121660ed1f7b487f99a1c2bf7>`_ for an example of it in production.
|
| 61 |
+
- ``suppress`` simply enables ``contextlib.suppress`` as a decorator.
|
| 62 |
+
- ``on_interrupt`` is a decorator used by CLI entry points to affect the handling of a ``KeyboardInterrupt``. Inspired by `Lucretiel/autocommand#18 <https://github.com/Lucretiel/autocommand/issues/18>`_.
|
| 63 |
+
- ``pushd`` is similar to pytest's ``monkeypatch.chdir`` or path's `default context <https://path.readthedocs.io/en/latest/api.html>`_, changes the current working directory for the duration of the context.
|
| 64 |
+
- ``tarball`` will download a tarball, extract it, change directory, yield, then clean up after. Convenient when working with web assets.
|
| 65 |
+
- ``null`` is there for those times when one code branch needs a context and the other doesn't; this null context provides symmetry across those branches.
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
For Enterprise
|
| 69 |
+
==============
|
| 70 |
+
|
| 71 |
+
Available as part of the Tidelift Subscription.
|
| 72 |
+
|
| 73 |
+
This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
|
| 74 |
+
|
| 75 |
+
`Learn more <https://tidelift.com/subscription/pkg/pypi-jaraco.context?utm_source=pypi-jaraco.context&utm_medium=referral&utm_campaign=github>`_.
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/RECORD
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
packaging-24.2.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
|
| 2 |
+
packaging-24.2.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
|
| 3 |
+
packaging-24.2.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
|
| 4 |
+
packaging-24.2.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
|
| 5 |
+
packaging-24.2.dist-info/METADATA,sha256=ohH86s6k5mIfQxY2TS0LcSfADeOFa4BiCC-bxZV-pNs,3204
|
| 6 |
+
packaging-24.2.dist-info/RECORD,,
|
| 7 |
+
packaging-24.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 8 |
+
packaging-24.2.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
|
| 9 |
+
packaging/__init__.py,sha256=dk4Ta_vmdVJxYHDcfyhvQNw8V3PgSBomKNXqg-D2JDY,494
|
| 10 |
+
packaging/_elffile.py,sha256=cflAQAkE25tzhYmq_aCi72QfbT_tn891tPzfpbeHOwE,3306
|
| 11 |
+
packaging/_manylinux.py,sha256=vl5OCoz4kx80H5rwXKeXWjl9WNISGmr4ZgTpTP9lU9c,9612
|
| 12 |
+
packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694
|
| 13 |
+
packaging/_parser.py,sha256=s_TvTvDNK0NrM2QB3VKThdWFM4Nc0P6JnkObkl3MjpM,10236
|
| 14 |
+
packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
|
| 15 |
+
packaging/_tokenizer.py,sha256=J6v5H7Jzvb-g81xp_2QACKwO7LxHQA6ikryMU7zXwN8,5273
|
| 16 |
+
packaging/licenses/__init__.py,sha256=1x5M1nEYjcgwEbLt0dXwz2ukjr18DiCzC0sraQqJ-Ww,5715
|
| 17 |
+
packaging/licenses/_spdx.py,sha256=oAm1ztPFwlsmCKe7lAAsv_OIOfS1cWDu9bNBkeu-2ns,48398
|
| 18 |
+
packaging/markers.py,sha256=c89TNzB7ZdGYhkovm6PYmqGyHxXlYVaLW591PHUNKD8,10561
|
| 19 |
+
packaging/metadata.py,sha256=YJibM7GYe4re8-0a3OlXmGS-XDgTEoO4tlBt2q25Bng,34762
|
| 20 |
+
packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 21 |
+
packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947
|
| 22 |
+
packaging/specifiers.py,sha256=GG1wPNMcL0fMJO68vF53wKMdwnfehDcaI-r9NpTfilA,40074
|
| 23 |
+
packaging/tags.py,sha256=CFqrJzAzc2XNGexerH__T-Y5Iwq7WbsYXsiLERLWxY0,21014
|
| 24 |
+
packaging/utils.py,sha256=0F3Hh9OFuRgrhTgGZUl5K22Fv1YP2tZl1z_2gO6kJiA,5050
|
| 25 |
+
packaging/version.py,sha256=olfyuk_DPbflNkJ4wBWetXQ17c74x3DB501degUv7DY,16676
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/REQUESTED
ADDED
|
File without changes
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/packaging-24.2.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: flit 3.10.1
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.75 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_checkers.cpython-310.pyc
ADDED
|
Binary file (19.8 kB). View file
|
|
|
videollama2/lib/python3.10/site-packages/setuptools/_vendor/typeguard/__pycache__/_pytest_plugin.cpython-310.pyc
ADDED
|
Binary file (4.06 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/blake3/blake3.cpython-310-x86_64-linux-gnu.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:00f3efe5f49542c2597365926c5754aaeb08f6f6760b93419ed3b36ed05ccd13
|
| 3 |
+
size 965432
|
vllm/lib/python3.10/site-packages/et_xmlfile-2.0.0.dist-info/AUTHORS.txt
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
The authors in alphabetical order
|
| 2 |
+
|
| 3 |
+
* Charlie Clark
|
| 4 |
+
* Daniel Hillier
|
| 5 |
+
* Elias Rabel
|
vllm/lib/python3.10/site-packages/et_xmlfile-2.0.0.dist-info/LICENCE.python
ADDED
|
@@ -0,0 +1,298 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
et_xml is licensed under the MIT license; see the file LICENCE for details.
|
| 2 |
+
|
| 3 |
+
et_xml includes code from the Python standard library, which is licensed under
|
| 4 |
+
the Python license, a permissive open source license. The copyright and license
|
| 5 |
+
is included below for compliance with Python's terms.
|
| 6 |
+
|
| 7 |
+
This module includes corrections and new features as follows:
|
| 8 |
+
- Correct handling of attributes namespaces when a default namespace
|
| 9 |
+
has been registered.
|
| 10 |
+
- Records the namespaces for an Element during parsing and utilises them to
|
| 11 |
+
allow inspection of namespaces at specific elements in the xml tree and
|
| 12 |
+
during serialisation.
|
| 13 |
+
|
| 14 |
+
Misc:
|
| 15 |
+
- Includes the test_xml_etree with small modifications for testing the
|
| 16 |
+
modifications in this package.
|
| 17 |
+
|
| 18 |
+
----------------------------------------------------------------------
|
| 19 |
+
|
| 20 |
+
Copyright (c) 2001-present Python Software Foundation; All Rights Reserved
|
| 21 |
+
|
| 22 |
+
A. HISTORY OF THE SOFTWARE
|
| 23 |
+
==========================
|
| 24 |
+
|
| 25 |
+
Python was created in the early 1990s by Guido van Rossum at Stichting
|
| 26 |
+
Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
|
| 27 |
+
as a successor of a language called ABC. Guido remains Python's
|
| 28 |
+
principal author, although it includes many contributions from others.
|
| 29 |
+
|
| 30 |
+
In 1995, Guido continued his work on Python at the Corporation for
|
| 31 |
+
National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
|
| 32 |
+
in Reston, Virginia where he released several versions of the
|
| 33 |
+
software.
|
| 34 |
+
|
| 35 |
+
In May 2000, Guido and the Python core development team moved to
|
| 36 |
+
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
|
| 37 |
+
year, the PythonLabs team moved to Digital Creations, which became
|
| 38 |
+
Zope Corporation. In 2001, the Python Software Foundation (PSF, see
|
| 39 |
+
https://www.python.org/psf/) was formed, a non-profit organization
|
| 40 |
+
created specifically to own Python-related Intellectual Property.
|
| 41 |
+
Zope Corporation was a sponsoring member of the PSF.
|
| 42 |
+
|
| 43 |
+
All Python releases are Open Source (see https://opensource.org for
|
| 44 |
+
the Open Source Definition). Historically, most, but not all, Python
|
| 45 |
+
releases have also been GPL-compatible; the table below summarizes
|
| 46 |
+
the various releases.
|
| 47 |
+
|
| 48 |
+
Release Derived Year Owner GPL-
|
| 49 |
+
from compatible? (1)
|
| 50 |
+
|
| 51 |
+
0.9.0 thru 1.2 1991-1995 CWI yes
|
| 52 |
+
1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
|
| 53 |
+
1.6 1.5.2 2000 CNRI no
|
| 54 |
+
2.0 1.6 2000 BeOpen.com no
|
| 55 |
+
1.6.1 1.6 2001 CNRI yes (2)
|
| 56 |
+
2.1 2.0+1.6.1 2001 PSF no
|
| 57 |
+
2.0.1 2.0+1.6.1 2001 PSF yes
|
| 58 |
+
2.1.1 2.1+2.0.1 2001 PSF yes
|
| 59 |
+
2.1.2 2.1.1 2002 PSF yes
|
| 60 |
+
2.1.3 2.1.2 2002 PSF yes
|
| 61 |
+
2.2 and above 2.1.1 2001-now PSF yes
|
| 62 |
+
|
| 63 |
+
Footnotes:
|
| 64 |
+
|
| 65 |
+
(1) GPL-compatible doesn't mean that we're distributing Python under
|
| 66 |
+
the GPL. All Python licenses, unlike the GPL, let you distribute
|
| 67 |
+
a modified version without making your changes open source. The
|
| 68 |
+
GPL-compatible licenses make it possible to combine Python with
|
| 69 |
+
other software that is released under the GPL; the others don't.
|
| 70 |
+
|
| 71 |
+
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
|
| 72 |
+
because its license has a choice of law clause. According to
|
| 73 |
+
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
|
| 74 |
+
is "not incompatible" with the GPL.
|
| 75 |
+
|
| 76 |
+
Thanks to the many outside volunteers who have worked under Guido's
|
| 77 |
+
direction to make these releases possible.
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
|
| 81 |
+
===============================================================
|
| 82 |
+
|
| 83 |
+
Python software and documentation are licensed under the
|
| 84 |
+
Python Software Foundation License Version 2.
|
| 85 |
+
|
| 86 |
+
Starting with Python 3.8.6, examples, recipes, and other code in
|
| 87 |
+
the documentation are dual licensed under the PSF License Version 2
|
| 88 |
+
and the Zero-Clause BSD license.
|
| 89 |
+
|
| 90 |
+
Some software incorporated into Python is under different licenses.
|
| 91 |
+
The licenses are listed with code falling under that license.
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
| 95 |
+
--------------------------------------------
|
| 96 |
+
|
| 97 |
+
1. This LICENSE AGREEMENT is between the Python Software Foundation
|
| 98 |
+
("PSF"), and the Individual or Organization ("Licensee") accessing and
|
| 99 |
+
otherwise using this software ("Python") in source or binary form and
|
| 100 |
+
its associated documentation.
|
| 101 |
+
|
| 102 |
+
2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
| 103 |
+
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
| 104 |
+
analyze, test, perform and/or display publicly, prepare derivative works,
|
| 105 |
+
distribute, and otherwise use Python alone or in any derivative version,
|
| 106 |
+
provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
| 107 |
+
i.e., "Copyright (c) 2001-2024 Python Software Foundation; All Rights Reserved"
|
| 108 |
+
are retained in Python alone or in any derivative version prepared by Licensee.
|
| 109 |
+
|
| 110 |
+
3. In the event Licensee prepares a derivative work that is based on
|
| 111 |
+
or incorporates Python or any part thereof, and wants to make
|
| 112 |
+
the derivative work available to others as provided herein, then
|
| 113 |
+
Licensee hereby agrees to include in any such work a brief summary of
|
| 114 |
+
the changes made to Python.
|
| 115 |
+
|
| 116 |
+
4. PSF is making Python available to Licensee on an "AS IS"
|
| 117 |
+
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
| 118 |
+
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
| 119 |
+
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
| 120 |
+
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
| 121 |
+
INFRINGE ANY THIRD PARTY RIGHTS.
|
| 122 |
+
|
| 123 |
+
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
| 124 |
+
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
| 125 |
+
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
| 126 |
+
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
| 127 |
+
|
| 128 |
+
6. This License Agreement will automatically terminate upon a material
|
| 129 |
+
breach of its terms and conditions.
|
| 130 |
+
|
| 131 |
+
7. Nothing in this License Agreement shall be deemed to create any
|
| 132 |
+
relationship of agency, partnership, or joint venture between PSF and
|
| 133 |
+
Licensee. This License Agreement does not grant permission to use PSF
|
| 134 |
+
trademarks or trade name in a trademark sense to endorse or promote
|
| 135 |
+
products or services of Licensee, or any third party.
|
| 136 |
+
|
| 137 |
+
8. By copying, installing or otherwise using Python, Licensee
|
| 138 |
+
agrees to be bound by the terms and conditions of this License
|
| 139 |
+
Agreement.
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
|
| 143 |
+
-------------------------------------------
|
| 144 |
+
|
| 145 |
+
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
|
| 146 |
+
|
| 147 |
+
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
|
| 148 |
+
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
|
| 149 |
+
Individual or Organization ("Licensee") accessing and otherwise using
|
| 150 |
+
this software in source or binary form and its associated
|
| 151 |
+
documentation ("the Software").
|
| 152 |
+
|
| 153 |
+
2. Subject to the terms and conditions of this BeOpen Python License
|
| 154 |
+
Agreement, BeOpen hereby grants Licensee a non-exclusive,
|
| 155 |
+
royalty-free, world-wide license to reproduce, analyze, test, perform
|
| 156 |
+
and/or display publicly, prepare derivative works, distribute, and
|
| 157 |
+
otherwise use the Software alone or in any derivative version,
|
| 158 |
+
provided, however, that the BeOpen Python License is retained in the
|
| 159 |
+
Software, alone or in any derivative version prepared by Licensee.
|
| 160 |
+
|
| 161 |
+
3. BeOpen is making the Software available to Licensee on an "AS IS"
|
| 162 |
+
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
| 163 |
+
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
|
| 164 |
+
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
| 165 |
+
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
|
| 166 |
+
INFRINGE ANY THIRD PARTY RIGHTS.
|
| 167 |
+
|
| 168 |
+
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
|
| 169 |
+
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
|
| 170 |
+
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
|
| 171 |
+
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
| 172 |
+
|
| 173 |
+
5. This License Agreement will automatically terminate upon a material
|
| 174 |
+
breach of its terms and conditions.
|
| 175 |
+
|
| 176 |
+
6. This License Agreement shall be governed by and interpreted in all
|
| 177 |
+
respects by the law of the State of California, excluding conflict of
|
| 178 |
+
law provisions. Nothing in this License Agreement shall be deemed to
|
| 179 |
+
create any relationship of agency, partnership, or joint venture
|
| 180 |
+
between BeOpen and Licensee. This License Agreement does not grant
|
| 181 |
+
permission to use BeOpen trademarks or trade names in a trademark
|
| 182 |
+
sense to endorse or promote products or services of Licensee, or any
|
| 183 |
+
third party. As an exception, the "BeOpen Python" logos available at
|
| 184 |
+
http://www.pythonlabs.com/logos.html may be used according to the
|
| 185 |
+
permissions granted on that web page.
|
| 186 |
+
|
| 187 |
+
7. By copying, installing or otherwise using the software, Licensee
|
| 188 |
+
agrees to be bound by the terms and conditions of this License
|
| 189 |
+
Agreement.
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
|
| 193 |
+
---------------------------------------
|
| 194 |
+
|
| 195 |
+
1. This LICENSE AGREEMENT is between the Corporation for National
|
| 196 |
+
Research Initiatives, having an office at 1895 Preston White Drive,
|
| 197 |
+
Reston, VA 20191 ("CNRI"), and the Individual or Organization
|
| 198 |
+
("Licensee") accessing and otherwise using Python 1.6.1 software in
|
| 199 |
+
source or binary form and its associated documentation.
|
| 200 |
+
|
| 201 |
+
2. Subject to the terms and conditions of this License Agreement, CNRI
|
| 202 |
+
hereby grants Licensee a nonexclusive, royalty-free, world-wide
|
| 203 |
+
license to reproduce, analyze, test, perform and/or display publicly,
|
| 204 |
+
prepare derivative works, distribute, and otherwise use Python 1.6.1
|
| 205 |
+
alone or in any derivative version, provided, however, that CNRI's
|
| 206 |
+
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
|
| 207 |
+
1995-2001 Corporation for National Research Initiatives; All Rights
|
| 208 |
+
Reserved" are retained in Python 1.6.1 alone or in any derivative
|
| 209 |
+
version prepared by Licensee. Alternately, in lieu of CNRI's License
|
| 210 |
+
Agreement, Licensee may substitute the following text (omitting the
|
| 211 |
+
quotes): "Python 1.6.1 is made available subject to the terms and
|
| 212 |
+
conditions in CNRI's License Agreement. This Agreement together with
|
| 213 |
+
Python 1.6.1 may be located on the internet using the following
|
| 214 |
+
unique, persistent identifier (known as a handle): 1895.22/1013. This
|
| 215 |
+
Agreement may also be obtained from a proxy server on the internet
|
| 216 |
+
using the following URL: http://hdl.handle.net/1895.22/1013".
|
| 217 |
+
|
| 218 |
+
3. In the event Licensee prepares a derivative work that is based on
|
| 219 |
+
or incorporates Python 1.6.1 or any part thereof, and wants to make
|
| 220 |
+
the derivative work available to others as provided herein, then
|
| 221 |
+
Licensee hereby agrees to include in any such work a brief summary of
|
| 222 |
+
the changes made to Python 1.6.1.
|
| 223 |
+
|
| 224 |
+
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
|
| 225 |
+
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
| 226 |
+
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
|
| 227 |
+
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
| 228 |
+
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
|
| 229 |
+
INFRINGE ANY THIRD PARTY RIGHTS.
|
| 230 |
+
|
| 231 |
+
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
| 232 |
+
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
| 233 |
+
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
|
| 234 |
+
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
| 235 |
+
|
| 236 |
+
6. This License Agreement will automatically terminate upon a material
|
| 237 |
+
breach of its terms and conditions.
|
| 238 |
+
|
| 239 |
+
7. This License Agreement shall be governed by the federal
|
| 240 |
+
intellectual property law of the United States, including without
|
| 241 |
+
limitation the federal copyright law, and, to the extent such
|
| 242 |
+
U.S. federal law does not apply, by the law of the Commonwealth of
|
| 243 |
+
Virginia, excluding Virginia's conflict of law provisions.
|
| 244 |
+
Notwithstanding the foregoing, with regard to derivative works based
|
| 245 |
+
on Python 1.6.1 that incorporate non-separable material that was
|
| 246 |
+
previously distributed under the GNU General Public License (GPL), the
|
| 247 |
+
law of the Commonwealth of Virginia shall govern this License
|
| 248 |
+
Agreement only as to issues arising under or with respect to
|
| 249 |
+
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
|
| 250 |
+
License Agreement shall be deemed to create any relationship of
|
| 251 |
+
agency, partnership, or joint venture between CNRI and Licensee. This
|
| 252 |
+
License Agreement does not grant permission to use CNRI trademarks or
|
| 253 |
+
trade name in a trademark sense to endorse or promote products or
|
| 254 |
+
services of Licensee, or any third party.
|
| 255 |
+
|
| 256 |
+
8. By clicking on the "ACCEPT" button where indicated, or by copying,
|
| 257 |
+
installing or otherwise using Python 1.6.1, Licensee agrees to be
|
| 258 |
+
bound by the terms and conditions of this License Agreement.
|
| 259 |
+
|
| 260 |
+
ACCEPT
|
| 261 |
+
|
| 262 |
+
|
| 263 |
+
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
|
| 264 |
+
--------------------------------------------------
|
| 265 |
+
|
| 266 |
+
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
|
| 267 |
+
The Netherlands. All rights reserved.
|
| 268 |
+
|
| 269 |
+
Permission to use, copy, modify, and distribute this software and its
|
| 270 |
+
documentation for any purpose and without fee is hereby granted,
|
| 271 |
+
provided that the above copyright notice appear in all copies and that
|
| 272 |
+
both that copyright notice and this permission notice appear in
|
| 273 |
+
supporting documentation, and that the name of Stichting Mathematisch
|
| 274 |
+
Centrum or CWI not be used in advertising or publicity pertaining to
|
| 275 |
+
distribution of the software without specific, written prior
|
| 276 |
+
permission.
|
| 277 |
+
|
| 278 |
+
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
|
| 279 |
+
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
| 280 |
+
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
|
| 281 |
+
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
| 282 |
+
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
| 283 |
+
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
| 284 |
+
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
| 285 |
+
|
| 286 |
+
ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
|
| 287 |
+
----------------------------------------------------------------------
|
| 288 |
+
|
| 289 |
+
Permission to use, copy, modify, and/or distribute this software for any
|
| 290 |
+
purpose with or without fee is hereby granted.
|
| 291 |
+
|
| 292 |
+
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
| 293 |
+
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
| 294 |
+
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
| 295 |
+
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
| 296 |
+
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
| 297 |
+
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
| 298 |
+
PERFORMANCE OF THIS SOFTWARE.
|
vllm/lib/python3.10/site-packages/et_xmlfile-2.0.0.dist-info/LICENCE.rst
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
This software is under the MIT Licence
|
| 2 |
+
======================================
|
| 3 |
+
|
| 4 |
+
Copyright (c) 2010 openpyxl
|
| 5 |
+
|
| 6 |
+
Permission is hereby granted, free of charge, to any person obtaining a
|
| 7 |
+
copy of this software and associated documentation files (the
|
| 8 |
+
"Software"), to deal in the Software without restriction, including
|
| 9 |
+
without limitation the rights to use, copy, modify, merge, publish,
|
| 10 |
+
distribute, sublicense, and/or sell copies of the Software, and to
|
| 11 |
+
permit persons to whom the Software is furnished to do so, subject to
|
| 12 |
+
the following conditions:
|
| 13 |
+
|
| 14 |
+
The above copyright notice and this permission notice shall be included
|
| 15 |
+
in all copies or substantial portions of the Software.
|
| 16 |
+
|
| 17 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
| 18 |
+
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
| 19 |
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
| 20 |
+
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
| 21 |
+
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
| 22 |
+
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
| 23 |
+
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
vllm/lib/python3.10/site-packages/et_xmlfile-2.0.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
et_xmlfile-2.0.0.dist-info/AUTHORS.txt,sha256=fwOAKepUY2Bd0ieNMACZo4G86ekN2oPMqyBCNGtsgQc,82
|
| 2 |
+
et_xmlfile-2.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 3 |
+
et_xmlfile-2.0.0.dist-info/LICENCE.python,sha256=TM2q68D0S4NyDsA5m7erMprc4GfdYvc8VTWi3AViirI,14688
|
| 4 |
+
et_xmlfile-2.0.0.dist-info/LICENCE.rst,sha256=DIS7QvXTZ-Xr-fwt3jWxYUHfXuD9wYklCFi8bFVg9p4,1131
|
| 5 |
+
et_xmlfile-2.0.0.dist-info/METADATA,sha256=DpfX6pCe0PvgPYi8i29YZ3zuGwe9M1PONhzSQFkVIE4,2711
|
| 6 |
+
et_xmlfile-2.0.0.dist-info/RECORD,,
|
| 7 |
+
et_xmlfile-2.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 8 |
+
et_xmlfile-2.0.0.dist-info/WHEEL,sha256=HiCZjzuy6Dw0hdX5R3LCFPDmFS4BWl8H-8W39XfmgX4,91
|
| 9 |
+
et_xmlfile-2.0.0.dist-info/top_level.txt,sha256=34-74d5NNARgTsPxCMta5o28XpBNmSN0iCZhtmx2Fk8,11
|
| 10 |
+
et_xmlfile/__init__.py,sha256=AQ4_2cNUEyUHlHo-Y3Gd6-8S_6eyKd55jYO4eh23UHw,228
|
| 11 |
+
et_xmlfile/__pycache__/__init__.cpython-310.pyc,,
|
| 12 |
+
et_xmlfile/__pycache__/incremental_tree.cpython-310.pyc,,
|
| 13 |
+
et_xmlfile/__pycache__/xmlfile.cpython-310.pyc,,
|
| 14 |
+
et_xmlfile/incremental_tree.py,sha256=lX4VStfzUNK0jtrVsvshPENu7E_zQirglkyRtzGDwEg,34534
|
| 15 |
+
et_xmlfile/xmlfile.py,sha256=6QdxBq2P0Cf35R-oyXjLl5wOItfJJ4Yy6AlIF9RX7Bg,4886
|
vllm/lib/python3.10/site-packages/et_xmlfile-2.0.0.dist-info/REQUESTED
ADDED
|
File without changes
|
vllm/lib/python3.10/site-packages/pip/__init__.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List, Optional
|
| 2 |
+
|
| 3 |
+
__version__ = "25.0.1"
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def main(args: Optional[List[str]] = None) -> int:
|
| 7 |
+
"""This is an internal API only meant for use by pip's own console scripts.
|
| 8 |
+
|
| 9 |
+
For additional details, see https://github.com/pypa/pip/issues/7498.
|
| 10 |
+
"""
|
| 11 |
+
from pip._internal.utils.entrypoints import _wrapper
|
| 12 |
+
|
| 13 |
+
return _wrapper(args)
|
vllm/lib/python3.10/site-packages/pip/__main__.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
|
| 4 |
+
# Remove '' and current working directory from the first entry
|
| 5 |
+
# of sys.path, if present to avoid using current directory
|
| 6 |
+
# in pip commands check, freeze, install, list and show,
|
| 7 |
+
# when invoked as python -m pip <command>
|
| 8 |
+
if sys.path[0] in ("", os.getcwd()):
|
| 9 |
+
sys.path.pop(0)
|
| 10 |
+
|
| 11 |
+
# If we are running from a wheel, add the wheel to sys.path
|
| 12 |
+
# This allows the usage python pip-*.whl/pip install pip-*.whl
|
| 13 |
+
if __package__ == "":
|
| 14 |
+
# __file__ is pip-*.whl/pip/__main__.py
|
| 15 |
+
# first dirname call strips of '/__main__.py', second strips off '/pip'
|
| 16 |
+
# Resulting path is the name of the wheel itself
|
| 17 |
+
# Add that to sys.path so we can import pip
|
| 18 |
+
path = os.path.dirname(os.path.dirname(__file__))
|
| 19 |
+
sys.path.insert(0, path)
|
| 20 |
+
|
| 21 |
+
if __name__ == "__main__":
|
| 22 |
+
from pip._internal.cli.main import main as _main
|
| 23 |
+
|
| 24 |
+
sys.exit(_main())
|
vllm/lib/python3.10/site-packages/pip/__pip-runner__.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Execute exactly this copy of pip, within a different environment.
|
| 2 |
+
|
| 3 |
+
This file is named as it is, to ensure that this module can't be imported via
|
| 4 |
+
an import statement.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
# /!\ This version compatibility check section must be Python 2 compatible. /!\
|
| 8 |
+
|
| 9 |
+
import sys
|
| 10 |
+
|
| 11 |
+
# Copied from pyproject.toml
|
| 12 |
+
PYTHON_REQUIRES = (3, 8)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def version_str(version): # type: ignore
|
| 16 |
+
return ".".join(str(v) for v in version)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
if sys.version_info[:2] < PYTHON_REQUIRES:
|
| 20 |
+
raise SystemExit(
|
| 21 |
+
"This version of pip does not support python {} (requires >={}).".format(
|
| 22 |
+
version_str(sys.version_info[:2]), version_str(PYTHON_REQUIRES)
|
| 23 |
+
)
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
# From here on, we can use Python 3 features, but the syntax must remain
|
| 27 |
+
# Python 2 compatible.
|
| 28 |
+
|
| 29 |
+
import runpy # noqa: E402
|
| 30 |
+
from importlib.machinery import PathFinder # noqa: E402
|
| 31 |
+
from os.path import dirname # noqa: E402
|
| 32 |
+
|
| 33 |
+
PIP_SOURCES_ROOT = dirname(dirname(__file__))
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class PipImportRedirectingFinder:
|
| 37 |
+
@classmethod
|
| 38 |
+
def find_spec(self, fullname, path=None, target=None): # type: ignore
|
| 39 |
+
if fullname != "pip":
|
| 40 |
+
return None
|
| 41 |
+
|
| 42 |
+
spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target)
|
| 43 |
+
assert spec, (PIP_SOURCES_ROOT, fullname)
|
| 44 |
+
return spec
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
sys.meta_path.insert(0, PipImportRedirectingFinder())
|
| 48 |
+
|
| 49 |
+
assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module"
|
| 50 |
+
runpy.run_module("pip", run_name="__main__", alter_sys=True)
|
vllm/lib/python3.10/site-packages/pip/_internal/__init__.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List, Optional
|
| 2 |
+
|
| 3 |
+
from pip._internal.utils import _log
|
| 4 |
+
|
| 5 |
+
# init_logging() must be called before any call to logging.getLogger()
|
| 6 |
+
# which happens at import of most modules.
|
| 7 |
+
_log.init_logging()
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def main(args: Optional[List[str]] = None) -> int:
|
| 11 |
+
"""This is preserved for old console scripts that may still be referencing
|
| 12 |
+
it.
|
| 13 |
+
|
| 14 |
+
For additional details, see https://github.com/pypa/pip/issues/7498.
|
| 15 |
+
"""
|
| 16 |
+
from pip._internal.utils.entrypoints import _wrapper
|
| 17 |
+
|
| 18 |
+
return _wrapper(args)
|
vllm/lib/python3.10/site-packages/pip/_internal/build_env.py
ADDED
|
@@ -0,0 +1,322 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Build Environment used for isolation during sdist building
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
import os
|
| 6 |
+
import pathlib
|
| 7 |
+
import site
|
| 8 |
+
import sys
|
| 9 |
+
import textwrap
|
| 10 |
+
from collections import OrderedDict
|
| 11 |
+
from types import TracebackType
|
| 12 |
+
from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
|
| 13 |
+
|
| 14 |
+
from pip._vendor.packaging.version import Version
|
| 15 |
+
|
| 16 |
+
from pip import __file__ as pip_location
|
| 17 |
+
from pip._internal.cli.spinners import open_spinner
|
| 18 |
+
from pip._internal.locations import get_platlib, get_purelib, get_scheme
|
| 19 |
+
from pip._internal.metadata import get_default_environment, get_environment
|
| 20 |
+
from pip._internal.utils.logging import VERBOSE
|
| 21 |
+
from pip._internal.utils.packaging import get_requirement
|
| 22 |
+
from pip._internal.utils.subprocess import call_subprocess
|
| 23 |
+
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
| 24 |
+
|
| 25 |
+
if TYPE_CHECKING:
|
| 26 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 27 |
+
|
| 28 |
+
logger = logging.getLogger(__name__)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
|
| 32 |
+
return (a, b) if a != b else (a,)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class _Prefix:
|
| 36 |
+
def __init__(self, path: str) -> None:
|
| 37 |
+
self.path = path
|
| 38 |
+
self.setup = False
|
| 39 |
+
scheme = get_scheme("", prefix=path)
|
| 40 |
+
self.bin_dir = scheme.scripts
|
| 41 |
+
self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def get_runnable_pip() -> str:
|
| 45 |
+
"""Get a file to pass to a Python executable, to run the currently-running pip.
|
| 46 |
+
|
| 47 |
+
This is used to run a pip subprocess, for installing requirements into the build
|
| 48 |
+
environment.
|
| 49 |
+
"""
|
| 50 |
+
source = pathlib.Path(pip_location).resolve().parent
|
| 51 |
+
|
| 52 |
+
if not source.is_dir():
|
| 53 |
+
# This would happen if someone is using pip from inside a zip file. In that
|
| 54 |
+
# case, we can use that directly.
|
| 55 |
+
return str(source)
|
| 56 |
+
|
| 57 |
+
return os.fsdecode(source / "__pip-runner__.py")
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _get_system_sitepackages() -> Set[str]:
|
| 61 |
+
"""Get system site packages
|
| 62 |
+
|
| 63 |
+
Usually from site.getsitepackages,
|
| 64 |
+
but fallback on `get_purelib()/get_platlib()` if unavailable
|
| 65 |
+
(e.g. in a virtualenv created by virtualenv<20)
|
| 66 |
+
|
| 67 |
+
Returns normalized set of strings.
|
| 68 |
+
"""
|
| 69 |
+
if hasattr(site, "getsitepackages"):
|
| 70 |
+
system_sites = site.getsitepackages()
|
| 71 |
+
else:
|
| 72 |
+
# virtualenv < 20 overwrites site.py without getsitepackages
|
| 73 |
+
# fallback on get_purelib/get_platlib.
|
| 74 |
+
# this is known to miss things, but shouldn't in the cases
|
| 75 |
+
# where getsitepackages() has been removed (inside a virtualenv)
|
| 76 |
+
system_sites = [get_purelib(), get_platlib()]
|
| 77 |
+
return {os.path.normcase(path) for path in system_sites}
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class BuildEnvironment:
|
| 81 |
+
"""Creates and manages an isolated environment to install build deps"""
|
| 82 |
+
|
| 83 |
+
def __init__(self) -> None:
|
| 84 |
+
temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
|
| 85 |
+
|
| 86 |
+
self._prefixes = OrderedDict(
|
| 87 |
+
(name, _Prefix(os.path.join(temp_dir.path, name)))
|
| 88 |
+
for name in ("normal", "overlay")
|
| 89 |
+
)
|
| 90 |
+
|
| 91 |
+
self._bin_dirs: List[str] = []
|
| 92 |
+
self._lib_dirs: List[str] = []
|
| 93 |
+
for prefix in reversed(list(self._prefixes.values())):
|
| 94 |
+
self._bin_dirs.append(prefix.bin_dir)
|
| 95 |
+
self._lib_dirs.extend(prefix.lib_dirs)
|
| 96 |
+
|
| 97 |
+
# Customize site to:
|
| 98 |
+
# - ensure .pth files are honored
|
| 99 |
+
# - prevent access to system site packages
|
| 100 |
+
system_sites = _get_system_sitepackages()
|
| 101 |
+
|
| 102 |
+
self._site_dir = os.path.join(temp_dir.path, "site")
|
| 103 |
+
if not os.path.exists(self._site_dir):
|
| 104 |
+
os.mkdir(self._site_dir)
|
| 105 |
+
with open(
|
| 106 |
+
os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
|
| 107 |
+
) as fp:
|
| 108 |
+
fp.write(
|
| 109 |
+
textwrap.dedent(
|
| 110 |
+
"""
|
| 111 |
+
import os, site, sys
|
| 112 |
+
|
| 113 |
+
# First, drop system-sites related paths.
|
| 114 |
+
original_sys_path = sys.path[:]
|
| 115 |
+
known_paths = set()
|
| 116 |
+
for path in {system_sites!r}:
|
| 117 |
+
site.addsitedir(path, known_paths=known_paths)
|
| 118 |
+
system_paths = set(
|
| 119 |
+
os.path.normcase(path)
|
| 120 |
+
for path in sys.path[len(original_sys_path):]
|
| 121 |
+
)
|
| 122 |
+
original_sys_path = [
|
| 123 |
+
path for path in original_sys_path
|
| 124 |
+
if os.path.normcase(path) not in system_paths
|
| 125 |
+
]
|
| 126 |
+
sys.path = original_sys_path
|
| 127 |
+
|
| 128 |
+
# Second, add lib directories.
|
| 129 |
+
# ensuring .pth file are processed.
|
| 130 |
+
for path in {lib_dirs!r}:
|
| 131 |
+
assert not path in sys.path
|
| 132 |
+
site.addsitedir(path)
|
| 133 |
+
"""
|
| 134 |
+
).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
|
| 135 |
+
)
|
| 136 |
+
|
| 137 |
+
def __enter__(self) -> None:
|
| 138 |
+
self._save_env = {
|
| 139 |
+
name: os.environ.get(name, None)
|
| 140 |
+
for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
|
| 141 |
+
}
|
| 142 |
+
|
| 143 |
+
path = self._bin_dirs[:]
|
| 144 |
+
old_path = self._save_env["PATH"]
|
| 145 |
+
if old_path:
|
| 146 |
+
path.extend(old_path.split(os.pathsep))
|
| 147 |
+
|
| 148 |
+
pythonpath = [self._site_dir]
|
| 149 |
+
|
| 150 |
+
os.environ.update(
|
| 151 |
+
{
|
| 152 |
+
"PATH": os.pathsep.join(path),
|
| 153 |
+
"PYTHONNOUSERSITE": "1",
|
| 154 |
+
"PYTHONPATH": os.pathsep.join(pythonpath),
|
| 155 |
+
}
|
| 156 |
+
)
|
| 157 |
+
|
| 158 |
+
def __exit__(
|
| 159 |
+
self,
|
| 160 |
+
exc_type: Optional[Type[BaseException]],
|
| 161 |
+
exc_val: Optional[BaseException],
|
| 162 |
+
exc_tb: Optional[TracebackType],
|
| 163 |
+
) -> None:
|
| 164 |
+
for varname, old_value in self._save_env.items():
|
| 165 |
+
if old_value is None:
|
| 166 |
+
os.environ.pop(varname, None)
|
| 167 |
+
else:
|
| 168 |
+
os.environ[varname] = old_value
|
| 169 |
+
|
| 170 |
+
def check_requirements(
|
| 171 |
+
self, reqs: Iterable[str]
|
| 172 |
+
) -> Tuple[Set[Tuple[str, str]], Set[str]]:
|
| 173 |
+
"""Return 2 sets:
|
| 174 |
+
- conflicting requirements: set of (installed, wanted) reqs tuples
|
| 175 |
+
- missing requirements: set of reqs
|
| 176 |
+
"""
|
| 177 |
+
missing = set()
|
| 178 |
+
conflicting = set()
|
| 179 |
+
if reqs:
|
| 180 |
+
env = (
|
| 181 |
+
get_environment(self._lib_dirs)
|
| 182 |
+
if hasattr(self, "_lib_dirs")
|
| 183 |
+
else get_default_environment()
|
| 184 |
+
)
|
| 185 |
+
for req_str in reqs:
|
| 186 |
+
req = get_requirement(req_str)
|
| 187 |
+
# We're explicitly evaluating with an empty extra value, since build
|
| 188 |
+
# environments are not provided any mechanism to select specific extras.
|
| 189 |
+
if req.marker is not None and not req.marker.evaluate({"extra": ""}):
|
| 190 |
+
continue
|
| 191 |
+
dist = env.get_distribution(req.name)
|
| 192 |
+
if not dist:
|
| 193 |
+
missing.add(req_str)
|
| 194 |
+
continue
|
| 195 |
+
if isinstance(dist.version, Version):
|
| 196 |
+
installed_req_str = f"{req.name}=={dist.version}"
|
| 197 |
+
else:
|
| 198 |
+
installed_req_str = f"{req.name}==={dist.version}"
|
| 199 |
+
if not req.specifier.contains(dist.version, prereleases=True):
|
| 200 |
+
conflicting.add((installed_req_str, req_str))
|
| 201 |
+
# FIXME: Consider direct URL?
|
| 202 |
+
return conflicting, missing
|
| 203 |
+
|
| 204 |
+
def install_requirements(
|
| 205 |
+
self,
|
| 206 |
+
finder: "PackageFinder",
|
| 207 |
+
requirements: Iterable[str],
|
| 208 |
+
prefix_as_string: str,
|
| 209 |
+
*,
|
| 210 |
+
kind: str,
|
| 211 |
+
) -> None:
|
| 212 |
+
prefix = self._prefixes[prefix_as_string]
|
| 213 |
+
assert not prefix.setup
|
| 214 |
+
prefix.setup = True
|
| 215 |
+
if not requirements:
|
| 216 |
+
return
|
| 217 |
+
self._install_requirements(
|
| 218 |
+
get_runnable_pip(),
|
| 219 |
+
finder,
|
| 220 |
+
requirements,
|
| 221 |
+
prefix,
|
| 222 |
+
kind=kind,
|
| 223 |
+
)
|
| 224 |
+
|
| 225 |
+
@staticmethod
|
| 226 |
+
def _install_requirements(
|
| 227 |
+
pip_runnable: str,
|
| 228 |
+
finder: "PackageFinder",
|
| 229 |
+
requirements: Iterable[str],
|
| 230 |
+
prefix: _Prefix,
|
| 231 |
+
*,
|
| 232 |
+
kind: str,
|
| 233 |
+
) -> None:
|
| 234 |
+
args: List[str] = [
|
| 235 |
+
sys.executable,
|
| 236 |
+
pip_runnable,
|
| 237 |
+
"install",
|
| 238 |
+
"--ignore-installed",
|
| 239 |
+
"--no-user",
|
| 240 |
+
"--prefix",
|
| 241 |
+
prefix.path,
|
| 242 |
+
"--no-warn-script-location",
|
| 243 |
+
"--disable-pip-version-check",
|
| 244 |
+
# The prefix specified two lines above, thus
|
| 245 |
+
# target from config file or env var should be ignored
|
| 246 |
+
"--target",
|
| 247 |
+
"",
|
| 248 |
+
]
|
| 249 |
+
if logger.getEffectiveLevel() <= logging.DEBUG:
|
| 250 |
+
args.append("-vv")
|
| 251 |
+
elif logger.getEffectiveLevel() <= VERBOSE:
|
| 252 |
+
args.append("-v")
|
| 253 |
+
for format_control in ("no_binary", "only_binary"):
|
| 254 |
+
formats = getattr(finder.format_control, format_control)
|
| 255 |
+
args.extend(
|
| 256 |
+
(
|
| 257 |
+
"--" + format_control.replace("_", "-"),
|
| 258 |
+
",".join(sorted(formats or {":none:"})),
|
| 259 |
+
)
|
| 260 |
+
)
|
| 261 |
+
|
| 262 |
+
index_urls = finder.index_urls
|
| 263 |
+
if index_urls:
|
| 264 |
+
args.extend(["-i", index_urls[0]])
|
| 265 |
+
for extra_index in index_urls[1:]:
|
| 266 |
+
args.extend(["--extra-index-url", extra_index])
|
| 267 |
+
else:
|
| 268 |
+
args.append("--no-index")
|
| 269 |
+
for link in finder.find_links:
|
| 270 |
+
args.extend(["--find-links", link])
|
| 271 |
+
|
| 272 |
+
if finder.proxy:
|
| 273 |
+
args.extend(["--proxy", finder.proxy])
|
| 274 |
+
for host in finder.trusted_hosts:
|
| 275 |
+
args.extend(["--trusted-host", host])
|
| 276 |
+
if finder.custom_cert:
|
| 277 |
+
args.extend(["--cert", finder.custom_cert])
|
| 278 |
+
if finder.client_cert:
|
| 279 |
+
args.extend(["--client-cert", finder.client_cert])
|
| 280 |
+
if finder.allow_all_prereleases:
|
| 281 |
+
args.append("--pre")
|
| 282 |
+
if finder.prefer_binary:
|
| 283 |
+
args.append("--prefer-binary")
|
| 284 |
+
args.append("--")
|
| 285 |
+
args.extend(requirements)
|
| 286 |
+
with open_spinner(f"Installing {kind}") as spinner:
|
| 287 |
+
call_subprocess(
|
| 288 |
+
args,
|
| 289 |
+
command_desc=f"pip subprocess to install {kind}",
|
| 290 |
+
spinner=spinner,
|
| 291 |
+
)
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
class NoOpBuildEnvironment(BuildEnvironment):
|
| 295 |
+
"""A no-op drop-in replacement for BuildEnvironment"""
|
| 296 |
+
|
| 297 |
+
def __init__(self) -> None:
|
| 298 |
+
pass
|
| 299 |
+
|
| 300 |
+
def __enter__(self) -> None:
|
| 301 |
+
pass
|
| 302 |
+
|
| 303 |
+
def __exit__(
|
| 304 |
+
self,
|
| 305 |
+
exc_type: Optional[Type[BaseException]],
|
| 306 |
+
exc_val: Optional[BaseException],
|
| 307 |
+
exc_tb: Optional[TracebackType],
|
| 308 |
+
) -> None:
|
| 309 |
+
pass
|
| 310 |
+
|
| 311 |
+
def cleanup(self) -> None:
|
| 312 |
+
pass
|
| 313 |
+
|
| 314 |
+
def install_requirements(
|
| 315 |
+
self,
|
| 316 |
+
finder: "PackageFinder",
|
| 317 |
+
requirements: Iterable[str],
|
| 318 |
+
prefix_as_string: str,
|
| 319 |
+
*,
|
| 320 |
+
kind: str,
|
| 321 |
+
) -> None:
|
| 322 |
+
raise NotImplementedError()
|
vllm/lib/python3.10/site-packages/pip/_internal/cache.py
ADDED
|
@@ -0,0 +1,290 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Cache Management
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import hashlib
|
| 5 |
+
import json
|
| 6 |
+
import logging
|
| 7 |
+
import os
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
from typing import Any, Dict, List, Optional
|
| 10 |
+
|
| 11 |
+
from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
|
| 12 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 13 |
+
|
| 14 |
+
from pip._internal.exceptions import InvalidWheelFilename
|
| 15 |
+
from pip._internal.models.direct_url import DirectUrl
|
| 16 |
+
from pip._internal.models.link import Link
|
| 17 |
+
from pip._internal.models.wheel import Wheel
|
| 18 |
+
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
| 19 |
+
from pip._internal.utils.urls import path_to_url
|
| 20 |
+
|
| 21 |
+
logger = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
ORIGIN_JSON_NAME = "origin.json"
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def _hash_dict(d: Dict[str, str]) -> str:
|
| 27 |
+
"""Return a stable sha224 of a dictionary."""
|
| 28 |
+
s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
|
| 29 |
+
return hashlib.sha224(s.encode("ascii")).hexdigest()
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class Cache:
|
| 33 |
+
"""An abstract class - provides cache directories for data from links
|
| 34 |
+
|
| 35 |
+
:param cache_dir: The root of the cache.
|
| 36 |
+
"""
|
| 37 |
+
|
| 38 |
+
def __init__(self, cache_dir: str) -> None:
|
| 39 |
+
super().__init__()
|
| 40 |
+
assert not cache_dir or os.path.isabs(cache_dir)
|
| 41 |
+
self.cache_dir = cache_dir or None
|
| 42 |
+
|
| 43 |
+
def _get_cache_path_parts(self, link: Link) -> List[str]:
|
| 44 |
+
"""Get parts of part that must be os.path.joined with cache_dir"""
|
| 45 |
+
|
| 46 |
+
# We want to generate an url to use as our cache key, we don't want to
|
| 47 |
+
# just reuse the URL because it might have other items in the fragment
|
| 48 |
+
# and we don't care about those.
|
| 49 |
+
key_parts = {"url": link.url_without_fragment}
|
| 50 |
+
if link.hash_name is not None and link.hash is not None:
|
| 51 |
+
key_parts[link.hash_name] = link.hash
|
| 52 |
+
if link.subdirectory_fragment:
|
| 53 |
+
key_parts["subdirectory"] = link.subdirectory_fragment
|
| 54 |
+
|
| 55 |
+
# Include interpreter name, major and minor version in cache key
|
| 56 |
+
# to cope with ill-behaved sdists that build a different wheel
|
| 57 |
+
# depending on the python version their setup.py is being run on,
|
| 58 |
+
# and don't encode the difference in compatibility tags.
|
| 59 |
+
# https://github.com/pypa/pip/issues/7296
|
| 60 |
+
key_parts["interpreter_name"] = interpreter_name()
|
| 61 |
+
key_parts["interpreter_version"] = interpreter_version()
|
| 62 |
+
|
| 63 |
+
# Encode our key url with sha224, we'll use this because it has similar
|
| 64 |
+
# security properties to sha256, but with a shorter total output (and
|
| 65 |
+
# thus less secure). However the differences don't make a lot of
|
| 66 |
+
# difference for our use case here.
|
| 67 |
+
hashed = _hash_dict(key_parts)
|
| 68 |
+
|
| 69 |
+
# We want to nest the directories some to prevent having a ton of top
|
| 70 |
+
# level directories where we might run out of sub directories on some
|
| 71 |
+
# FS.
|
| 72 |
+
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
|
| 73 |
+
|
| 74 |
+
return parts
|
| 75 |
+
|
| 76 |
+
def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
|
| 77 |
+
can_not_cache = not self.cache_dir or not canonical_package_name or not link
|
| 78 |
+
if can_not_cache:
|
| 79 |
+
return []
|
| 80 |
+
|
| 81 |
+
path = self.get_path_for_link(link)
|
| 82 |
+
if os.path.isdir(path):
|
| 83 |
+
return [(candidate, path) for candidate in os.listdir(path)]
|
| 84 |
+
return []
|
| 85 |
+
|
| 86 |
+
    def get_path_for_link(self, link: Link) -> str:
        """Return a directory to store cached items in for link.

        Abstract on this base class — subclasses decide the on-disk layout.

        :raises NotImplementedError: always, on the base class.
        """
        raise NotImplementedError()
|
| 89 |
+
|
| 90 |
+
    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.

        Abstract on this base class — subclasses implement the lookup.

        :raises NotImplementedError: always, on the base class.
        """
        raise NotImplementedError()
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
class SimpleWheelCache(Cache):
    """A cache of wheels for future installs."""

    def __init__(self, cache_dir: str) -> None:
        super().__init__(cache_dir)

    def get_path_for_link(self, link: Link) -> str:
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
        to cache them in, and then consult that directory when looking up
        cache hits.

        We only insert things into the cache if they have plausible version
        numbers, so that we don't contaminate the cache with things that were
        not unique. E.g. ./package might have dozens of installs done for it
        and build a version of 0.0...and if we built and cached a wheel, we'd
        end up using the same wheel even if the source has been edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        assert self.cache_dir
        # Wheels live in "<cache_dir>/wheels/<aa>/<bb>/<cc>/<rest-of-hash>".
        return os.path.join(
            self.cache_dir, "wheels", *self._get_cache_path_parts(link)
        )

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        """Return a link to the best cached wheel for *link*, or *link* itself
        when no usable cached wheel exists.
        """
        if not package_name:
            return link

        canonical_name = canonicalize_name(package_name)
        scored = []
        for filename, directory in self._get_candidates(link, canonical_name):
            try:
                wheel = Wheel(filename)
            except InvalidWheelFilename:
                # Not a wheel filename at all; ignore it.
                continue
            if canonicalize_name(wheel.name) != canonical_name:
                logger.debug(
                    "Ignoring cached wheel %s for %s as it "
                    "does not match the expected distribution name %s.",
                    filename,
                    link,
                    package_name,
                )
                continue
            if not wheel.supported(supported_tags):
                # Built for a different python/arch/etc
                continue
            scored.append(
                (
                    wheel.support_index_min(supported_tags),
                    filename,
                    directory,
                )
            )

        if not scored:
            return link

        # Lowest support index wins (most-preferred tag).
        _, best_name, best_dir = min(scored)
        return Link(path_to_url(os.path.join(best_dir, best_name)))
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache that creates its own temporary cache directory."""

    def __init__(self) -> None:
        # The temp dir is globally managed, so wheels cached here only live
        # for the duration of this invocation and are cleaned up afterwards.
        self._temp_dir = TempDirectory(
            kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
            globally_managed=True,
        )

        super().__init__(self._temp_dir.path)
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
class CacheEntry:
    """A cached wheel link plus its recorded download origin, if any."""

    def __init__(
        self,
        link: Link,
        persistent: bool,
    ):
        self.link = link
        self.persistent = persistent
        self.origin: Optional[DirectUrl] = None
        origin_file = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
        if not origin_file.exists():
            return
        # Best-effort: a corrupt origin file must not break cache usage.
        try:
            self.origin = DirectUrl.from_json(
                origin_file.read_text(encoding="utf-8")
            )
        except Exception as e:
            logger.warning(
                "Ignoring invalid cache entry origin file %s for %s (%s)",
                origin_file,
                link.filename,
                e,
            )
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
class WheelCache(Cache):
    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache

    This Cache allows for gracefully degradation, using the ephem wheel cache
    when a certain link is not found in the simple wheel cache first.
    """

    def __init__(self, cache_dir: str) -> None:
        super().__init__(cache_dir)
        # Persistent on-disk cache plus a per-invocation temporary cache.
        self._wheel_cache = SimpleWheelCache(cache_dir)
        self._ephem_cache = EphemWheelCache()

    def get_path_for_link(self, link: Link) -> str:
        """Persistent cache directory for *link*."""
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link: Link) -> str:
        """Temporary (per-invocation) cache directory for *link*."""
        return self._ephem_cache.get_path_for_link(link)

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        """Return the cached wheel's link if a cache hit exists, else *link*."""
        cache_entry = self.get_cache_entry(link, package_name, supported_tags)
        if cache_entry is None:
            return link
        return cache_entry.link

    def get_cache_entry(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Optional[CacheEntry]:
        """Returns a CacheEntry with a link to a cached item if it exists or
        None. The cache entry indicates if the item was found in the persistent
        or ephemeral cache.
        """
        # The sub-caches signal a miss by returning the very same link
        # object that was passed in — hence the identity ("is not")
        # comparisons below rather than equality.
        retval = self._wheel_cache.get(
            link=link,
            package_name=package_name,
            supported_tags=supported_tags,
        )
        if retval is not link:
            return CacheEntry(retval, persistent=True)

        retval = self._ephem_cache.get(
            link=link,
            package_name=package_name,
            supported_tags=supported_tags,
        )
        if retval is not link:
            return CacheEntry(retval, persistent=False)

        return None

    @staticmethod
    def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
        """Write *download_info* as the origin file inside *cache_dir*,
        warning (but still overwriting) if an existing origin file is
        unreadable or records a different URL.
        """
        origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
        if origin_path.exists():
            try:
                origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8"))
            except Exception as e:
                logger.warning(
                    "Could not read origin file %s in cache entry (%s). "
                    "Will attempt to overwrite it.",
                    origin_path,
                    e,
                )
            else:
                # TODO: use DirectUrl.equivalent when
                # https://github.com/pypa/pip/pull/10564 is merged.
                if origin.url != download_info.url:
                    logger.warning(
                        "Origin URL %s in cache entry %s does not match download URL "
                        "%s. This is likely a pip bug or a cache corruption issue. "
                        "Will overwrite it with the new value.",
                        origin.url,
                        cache_dir,
                        download_info.url,
                    )
        origin_path.write_text(download_info.to_json(), encoding="utf-8")
|
vllm/lib/python3.10/site-packages/pip/_internal/configuration.py
ADDED
|
@@ -0,0 +1,383 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Configuration management setup
|
| 2 |
+
|
| 3 |
+
Some terminology:
|
| 4 |
+
- name
|
| 5 |
+
As written in config files.
|
| 6 |
+
- value
|
| 7 |
+
Value associated with a name
|
| 8 |
+
- key
|
| 9 |
+
Name combined with it's section (section.name)
|
| 10 |
+
- variant
|
| 11 |
+
A single word describing where the configuration key-value pair came from
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
import configparser
|
| 15 |
+
import locale
|
| 16 |
+
import os
|
| 17 |
+
import sys
|
| 18 |
+
from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
|
| 19 |
+
|
| 20 |
+
from pip._internal.exceptions import (
|
| 21 |
+
ConfigurationError,
|
| 22 |
+
ConfigurationFileCouldNotBeLoaded,
|
| 23 |
+
)
|
| 24 |
+
from pip._internal.utils import appdirs
|
| 25 |
+
from pip._internal.utils.compat import WINDOWS
|
| 26 |
+
from pip._internal.utils.logging import getLogger
|
| 27 |
+
from pip._internal.utils.misc import ensure_dir, enum
|
| 28 |
+
|
| 29 |
+
RawConfigParser = configparser.RawConfigParser # Shorthand
|
| 30 |
+
Kind = NewType("Kind", str)
|
| 31 |
+
|
| 32 |
+
CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
|
| 33 |
+
ENV_NAMES_IGNORED = "version", "help"
|
| 34 |
+
|
| 35 |
+
# The kinds of configurations there are.
|
| 36 |
+
kinds = enum(
|
| 37 |
+
USER="user", # User Specific
|
| 38 |
+
GLOBAL="global", # System Wide
|
| 39 |
+
SITE="site", # [Virtual] Environment Specific
|
| 40 |
+
ENV="env", # from PIP_CONFIG_FILE
|
| 41 |
+
ENV_VAR="env-var", # from Environment Variables
|
| 42 |
+
)
|
| 43 |
+
OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
|
| 44 |
+
VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE
|
| 45 |
+
|
| 46 |
+
logger = getLogger(__name__)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
# NOTE: Maybe use the optionx attribute to normalize keynames.
|
| 50 |
+
def _normalize_name(name: str) -> str:
|
| 51 |
+
"""Make a name consistent regardless of source (environment or file)"""
|
| 52 |
+
name = name.lower().replace("_", "-")
|
| 53 |
+
if name.startswith("--"):
|
| 54 |
+
name = name[2:] # only prefer long opts
|
| 55 |
+
return name
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def _disassemble_key(name: str) -> List[str]:
|
| 59 |
+
if "." not in name:
|
| 60 |
+
error_message = (
|
| 61 |
+
"Key does not contain dot separated section and key. "
|
| 62 |
+
f"Perhaps you wanted to use 'global.{name}' instead?"
|
| 63 |
+
)
|
| 64 |
+
raise ConfigurationError(error_message)
|
| 65 |
+
return name.split(".", 1)
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def get_configuration_files() -> Dict[Kind, List[str]]:
    """Compute the candidate configuration file paths for each variant
    (global, site, user) on this platform.
    """
    site_file = os.path.join(sys.prefix, CONFIG_BASENAME)
    # Old-style per-user location (~/pip or ~/.pip), kept for compatibility.
    legacy_user_file = os.path.join(
        os.path.expanduser("~"),
        "pip" if WINDOWS else ".pip",
        CONFIG_BASENAME,
    )
    user_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
    return {
        kinds.GLOBAL: [
            os.path.join(path, CONFIG_BASENAME)
            for path in appdirs.site_config_dirs("pip")
        ],
        kinds.SITE: [site_file],
        kinds.USER: [legacy_user_file, user_file],
    }
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
class Configuration:
    """Handles management of configuration.

    Provides an interface to accessing and managing configuration files.

    This class provides an API that takes "section.key-name" style
    keys and stores the value associated with it as "key-name" under the
    section "section".

    This allows for a clean interface wherein both the section and the
    key-name are preserved in an easy to manage form in the configuration files
    and the data stored is also nice.
    """

    def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
        """:param isolated: When True, environment variables are not consulted.
        :param load_only: Restrict loading/editing to this single variant;
            must be one of VALID_LOAD_ONLY when given.
        :raises ConfigurationError: if *load_only* is not a valid variant.
        """
        super().__init__()

        if load_only is not None and load_only not in VALID_LOAD_ONLY:
            raise ConfigurationError(
                "Got invalid value for load_only - should be one of {}".format(
                    ", ".join(map(repr, VALID_LOAD_ONLY))
                )
            )
        self.isolated = isolated
        self.load_only = load_only

        # Because we keep track of where we got the data from
        self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
            variant: [] for variant in OVERRIDE_ORDER
        }
        self._config: Dict[Kind, Dict[str, Any]] = {
            variant: {} for variant in OVERRIDE_ORDER
        }
        self._modified_parsers: List[Tuple[str, RawConfigParser]] = []

    def load(self) -> None:
        """Loads configuration from configuration files and environment"""
        self._load_config_files()
        if not self.isolated:
            self._load_environment_vars()

    def get_file_to_edit(self) -> Optional[str]:
        """Returns the file with highest priority in configuration"""
        assert self.load_only is not None, "Need to be specified a file to be editing"

        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            return None

    def items(self) -> Iterable[Tuple[str, Any]]:
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        return self._dictionary.items()

    def get_value(self, key: str) -> Any:
        """Get a value from the configuration.

        :raises ConfigurationError: if the key is absent or malformed.
        """
        orig_key = key
        key = _normalize_name(key)
        try:
            return self._dictionary[key]
        except KeyError:
            # disassembling triggers a more useful error message than simply
            # "No such key" in the case that the key isn't in the form command.option
            _disassemble_key(key)
            raise ConfigurationError(f"No such key - {orig_key}")

    def set_value(self, key: str, value: Any) -> None:
        """Modify a value in the configuration."""
        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Modify the parser and the configuration
            if not parser.has_section(section):
                parser.add_section(section)
            parser.set(section, name, value)

        self._config[self.load_only][key] = value
        self._mark_as_modified(fname, parser)

    def unset_value(self, key: str) -> None:
        """Unset a value in the configuration.

        :raises ConfigurationError: if the key is not set in the target
            variant, or the parser unexpectedly fails to remove it.
        """
        orig_key = key
        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
        if key not in self._config[self.load_only]:
            raise ConfigurationError(f"No such key - {orig_key}")

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)
            if not (
                parser.has_section(section) and parser.remove_option(section, name)
            ):
                # The option was not removed.
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
                )

            # The section may be empty after the option was removed.
            if not parser.items(section):
                parser.remove_section(section)
            self._mark_as_modified(fname, parser)

        del self._config[self.load_only][key]

    def save(self) -> None:
        """Save the current in-memory state."""
        self._ensure_have_load_only()

        for fname, parser in self._modified_parsers:
            logger.info("Writing to %s", fname)

            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))

            # Ensure directory's permission(need to be writeable)
            try:
                with open(fname, "w") as f:
                    parser.write(f)
            except OSError as error:
                raise ConfigurationError(
                    f"An error occurred while writing to the configuration file "
                    f"{fname}: {error}"
                )

    #
    # Private routines
    #

    def _ensure_have_load_only(self) -> None:
        # Editing operations require a single target variant to be chosen.
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)

    @property
    def _dictionary(self) -> Dict[str, Any]:
        """A dictionary representing the loaded configuration."""
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        # are not needed here.
        retval = {}

        # Later variants in OVERRIDE_ORDER win on key collisions.
        for variant in OVERRIDE_ORDER:
            retval.update(self._config[variant])

        return retval

    def _load_config_files(self) -> None:
        """Loads configuration from configuration files"""
        config_files = dict(self.iter_config_files())
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            logger.debug(
                "Skipping loading configuration files due to "
                "environment's PIP_CONFIG_FILE being os.devnull"
            )
            return

        for variant, files in config_files.items():
            for fname in files:
                # If there's specific variant set in `load_only`, load only
                # that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
                    continue

                parser = self._load_file(variant, fname)

                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))

    def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
        """Parse one file and merge its (normalized) keys into *variant*."""
        logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)

        for section in parser.sections():
            items = parser.items(section)
            self._config[variant].update(self._normalized_keys(section, items))

        return parser

    def _construct_parser(self, fname: str) -> RawConfigParser:
        """Build a parser for *fname*, tolerating a missing file.

        :raises ConfigurationFileCouldNotBeLoaded: on decode or parse errors.
        """
        parser = configparser.RawConfigParser()
        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
            locale_encoding = locale.getpreferredencoding(False)
            try:
                parser.read(fname, encoding=locale_encoding)
            except UnicodeDecodeError:
                # See https://github.com/pypa/pip/issues/4963
                raise ConfigurationFileCouldNotBeLoaded(
                    reason=f"contains invalid {locale_encoding} characters",
                    fname=fname,
                )
            except configparser.Error as error:
                # See https://github.com/pypa/pip/issues/4893
                raise ConfigurationFileCouldNotBeLoaded(error=error)
        return parser

    def _load_environment_vars(self) -> None:
        """Loads configuration from environment variables"""
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self.get_environ_vars())
        )

    def _normalized_keys(
        self, section: str, items: Iterable[Tuple[str, Any]]
    ) -> Dict[str, Any]:
        """Normalizes items to construct a dictionary with normalized keys.

        This routine is where the names become keys and are made the same
        regardless of source - configuration files or environment.
        """
        normalized = {}
        for name, val in items:
            key = section + "." + _normalize_name(name)
            normalized[key] = val
        return normalized

    def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if key.startswith("PIP_"):
                name = key[4:].lower()
                if name not in ENV_NAMES_IGNORED:
                    yield name, val

    # XXX: This is patched in the tests.
    def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
        """Yields variant and configuration files associated with it.

        This should be treated like items of a dictionary. The order
        here doesn't affect what gets overridden. That is controlled
        by OVERRIDE_ORDER. However this does control the order they are
        displayed to the user. It's probably most ergonomic to display
        things in the same order as OVERRIDE_ORDER
        """
        # SMELL: Move the conditions out of this function

        env_config_file = os.environ.get("PIP_CONFIG_FILE", None)
        config_files = get_configuration_files()

        yield kinds.GLOBAL, config_files[kinds.GLOBAL]

        # per-user config is not loaded when env_config_file exists
        should_load_user_config = not self.isolated and not (
            env_config_file and os.path.exists(env_config_file)
        )
        if should_load_user_config:
            # The legacy config file is overridden by the new config file
            yield kinds.USER, config_files[kinds.USER]

        # virtualenv config
        yield kinds.SITE, config_files[kinds.SITE]

        if env_config_file is not None:
            yield kinds.ENV, [env_config_file]
        else:
            yield kinds.ENV, []

    def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
        """Get values present in a config file"""
        return self._config[variant]

    def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
        # Determine which parser to modify
        assert self.load_only
        parsers = self._parsers[self.load_only]
        if not parsers:
            # This should not happen if everything works correctly.
            raise ConfigurationError(
                "Fatal Internal error [id=2]. Please report as a bug."
            )

        # Use the highest priority parser.
        return parsers[-1]

    # XXX: This is patched in the tests.
    def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
        # De-duplicate: a (file, parser) pair is queued for saving only once.
        file_parser_tuple = (fname, parser)
        if file_parser_tuple not in self._modified_parsers:
            self._modified_parsers.append(file_parser_tuple)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self._dictionary!r})"
|
vllm/lib/python3.10/site-packages/pip/_internal/exceptions.py
ADDED
|
@@ -0,0 +1,809 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Exceptions used throughout package.
|
| 2 |
+
|
| 3 |
+
This module MUST NOT try to import from anything within `pip._internal` to
|
| 4 |
+
operate. This is expected to be importable from any/all files within the
|
| 5 |
+
subpackage and, thus, should not depend on them.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import configparser
|
| 9 |
+
import contextlib
|
| 10 |
+
import locale
|
| 11 |
+
import logging
|
| 12 |
+
import pathlib
|
| 13 |
+
import re
|
| 14 |
+
import sys
|
| 15 |
+
from itertools import chain, groupby, repeat
|
| 16 |
+
from typing import TYPE_CHECKING, Dict, Iterator, List, Literal, Optional, Union
|
| 17 |
+
|
| 18 |
+
from pip._vendor.packaging.requirements import InvalidRequirement
|
| 19 |
+
from pip._vendor.packaging.version import InvalidVersion
|
| 20 |
+
from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
|
| 21 |
+
from pip._vendor.rich.markup import escape
|
| 22 |
+
from pip._vendor.rich.text import Text
|
| 23 |
+
|
| 24 |
+
if TYPE_CHECKING:
|
| 25 |
+
from hashlib import _Hash
|
| 26 |
+
|
| 27 |
+
from pip._vendor.requests.models import Request, Response
|
| 28 |
+
|
| 29 |
+
from pip._internal.metadata import BaseDistribution
|
| 30 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 31 |
+
|
| 32 |
+
logger = logging.getLogger(__name__)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
#
|
| 36 |
+
# Scaffolding
|
| 37 |
+
#
|
| 38 |
+
def _is_kebab_case(s: str) -> bool:
|
| 39 |
+
return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def _prefix_with_indent(
    s: Union[Text, str],
    console: Console,
    *,
    prefix: str,
    indent: str,
) -> Text:
    """Render *s* with *prefix* before its first line and *indent* before the rest."""
    rendered = s if isinstance(s, Text) else console.render_str(s)

    separator = console.render_str(f"\n{indent}", overflow="ignore")
    return console.render_str(prefix, overflow="ignore") + separator.join(
        rendered.split(allow_blank=True)
    )
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class PipError(Exception):
    """Base class for all exceptions raised by pip."""
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class DiagnosticPipError(PipError):
    """An error, that presents diagnostic information to the user.

    This contains a bunch of logic, to enable pretty presentation of our error
    messages. Each error gets a unique reference. Each error can also include
    additional context, a hint and/or a note -- which are presented with the
    main error message in a consistent style.

    This is adapted from the error output styling in `sphinx-theme-builder`.
    """

    # Kebab-case identifier for this error category. Subclasses usually set
    # this as a class attribute instead of passing ``reference`` at call time.
    reference: str

    def __init__(
        self,
        *,
        kind: 'Literal["error", "warning"]' = "error",
        reference: Optional[str] = None,
        message: Union[str, Text],
        context: Optional[Union[str, Text]],
        hint_stmt: Optional[Union[str, Text]],
        note_stmt: Optional[Union[str, Text]] = None,
        link: Optional[str] = None,
    ) -> None:
        # Ensure a proper reference is provided.
        if reference is None:
            assert hasattr(self, "reference"), "error reference not provided!"
            reference = self.reference
        assert _is_kebab_case(reference), "error reference must be kebab-case!"

        self.kind = kind
        self.reference = reference

        self.message = message
        self.context = context

        self.note_stmt = note_stmt
        self.hint_stmt = hint_stmt

        self.link = link

        # The plain-Exception message is just a compact tag; the rich
        # rendering below is what users normally see.
        super().__init__(f"<{self.__class__.__name__}: {self.reference}>")

    def __repr__(self) -> str:
        return (
            f"<{self.__class__.__name__}("
            f"reference={self.reference!r}, "
            f"message={self.message!r}, "
            f"context={self.context!r}, "
            f"note_stmt={self.note_stmt!r}, "
            f"hint_stmt={self.hint_stmt!r}"
            ")>"
        )

    def __rich_console__(
        self,
        console: Console,
        options: ConsoleOptions,
    ) -> RenderResult:
        # rich rendering hook: yields the styled, multi-part error report.
        colour = "red" if self.kind == "error" else "yellow"

        yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]"
        yield ""

        if not options.ascii_only:
            # Present the main message, with relevant context indented.
            if self.context is not None:
                yield _prefix_with_indent(
                    self.message,
                    console,
                    prefix=f"[{colour}]×[/] ",
                    indent=f"[{colour}]│[/] ",
                )
                yield _prefix_with_indent(
                    self.context,
                    console,
                    prefix=f"[{colour}]╰─>[/] ",
                    indent=f"[{colour}] [/] ",
                )
            else:
                yield _prefix_with_indent(
                    self.message,
                    console,
                    prefix="[red]×[/] ",
                    indent=" ",
                )
        else:
            # ASCII-only console: no box-drawing characters, plain layout.
            yield self.message
            if self.context is not None:
                yield ""
                yield self.context

        if self.note_stmt is not None or self.hint_stmt is not None:
            yield ""

        if self.note_stmt is not None:
            yield _prefix_with_indent(
                self.note_stmt,
                console,
                prefix="[magenta bold]note[/]: ",
                indent=" ",
            )
        if self.hint_stmt is not None:
            yield _prefix_with_indent(
                self.hint_stmt,
                console,
                prefix="[cyan bold]hint[/]: ",
                indent=" ",
            )

        if self.link is not None:
            yield ""
            yield f"Link: {self.link}"
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
#
|
| 179 |
+
# Actual Errors
|
| 180 |
+
#
|
| 181 |
+
class ConfigurationError(PipError):
    """Raised for general errors in pip's configuration handling."""
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
class InstallationError(PipError):
    """Raised for general errors encountered during installation."""
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
class MissingPyProjectBuildRequires(DiagnosticPipError):
    """Raised when pyproject.toml has a `build-system` table without `requires`."""

    reference = "missing-pyproject-build-system-requires"

    def __init__(self, *, package: str) -> None:
        detail = Text(
            "This package has an invalid pyproject.toml file.\n"
            "The [build-system] table is missing the mandatory `requires` key."
        )
        super().__init__(
            message=f"Can not process {escape(package)}",
            context=detail,
            note_stmt="This is an issue with the package mentioned above, not pip.",
            hint_stmt=Text("See PEP 518 for the detailed specification."),
        )
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
class InvalidPyProjectBuildRequires(DiagnosticPipError):
    """Raised when pyproject.toml contains an invalid `build-system.requires`."""

    reference = "invalid-pyproject-build-system-requires"

    def __init__(self, *, package: str, reason: str) -> None:
        detail = Text(
            "This package has an invalid `build-system.requires` key in "
            f"pyproject.toml.\n{reason}"
        )
        super().__init__(
            message=f"Can not process {escape(package)}",
            context=detail,
            note_stmt="This is an issue with the package mentioned above, not pip.",
            hint_stmt=Text("See PEP 518 for the detailed specification."),
        )
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
class NoneMetadataError(PipError):
    """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".

    This signifies an inconsistency, when the Distribution claims to have
    the metadata file (if not, raise ``FileNotFoundError`` instead), but is
    not actually able to produce its content. This may be due to permission
    errors.
    """

    def __init__(
        self,
        dist: "BaseDistribution",
        metadata_name: str,
    ) -> None:
        """
        :param dist: A Distribution object.
        :param metadata_name: The name of the metadata being accessed
            (can be "METADATA" or "PKG-INFO").
        """
        self.dist = dist
        self.metadata_name = metadata_name

    def __str__(self) -> str:
        # `dist` stringification carries version and location details,
        # which is why it is preferred over a bare package name here.
        return f"None {self.metadata_name} metadata found for distribution: {self.dist}"
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
class UserInstallationInvalid(InstallationError):
    """A ``--user`` install was requested but the environment has no user site."""

    def __str__(self) -> str:
        return "User base directory is not specified"
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
class InvalidSchemeCombination(InstallationError):
    """Mutually exclusive install-scheme options were given (held in ``args``)."""

    def __str__(self) -> str:
        *head, last = self.args
        joined = ", ".join(str(a) for a in head)
        return f"Cannot set {joined} and {last} together"
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
class DistributionNotFound(InstallationError):
    """No distribution could be found to satisfy a requirement."""
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
class RequirementsFileParseError(InstallationError):
    """A line of a requirements file could not be parsed."""
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
class BestVersionAlreadyInstalled(PipError):
    """The most up-to-date version of a package is already installed."""
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
class BadCommand(PipError):
    """virtualenv or another required command could not be found."""
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
class CommandError(PipError):
    """The command-line arguments given to pip are invalid."""
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
class PreviousBuildDirError(PipError):
    """A conflicting build directory from a previous run is in the way."""
|
| 287 |
+
|
| 288 |
+
|
| 289 |
+
class NetworkConnectionError(PipError):
    """HTTP connection error."""

    def __init__(
        self,
        error_msg: str,
        response: Optional["Response"] = None,
        request: Optional["Request"] = None,
    ) -> None:
        """Store the failure message together with the `response`/`request` pair."""
        self.response = response
        self.request = request
        self.error_msg = error_msg
        # Backfill the request from the response when the caller omitted it.
        if (
            not self.request
            and self.response is not None
            and hasattr(response, "request")
        ):
            self.request = self.response.request
        super().__init__(error_msg, response, request)

    def __str__(self) -> str:
        return str(self.error_msg)
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
class InvalidWheelFilename(InstallationError):
    """A wheel filename does not follow the expected naming convention."""
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
class UnsupportedWheel(InstallationError):
    """A wheel is not supported in this environment."""
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
class InvalidWheel(InstallationError):
    """Invalid (e.g. corrupt) wheel."""

    def __init__(self, location: str, name: str):
        self.location = location
        self.name = name

    def __str__(self) -> str:
        return "Wheel '{}' located at {} is invalid.".format(self.name, self.location)
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
class MetadataInconsistent(InstallationError):
    """Built metadata contains inconsistent information.

    This is raised when the metadata contains values (e.g. name and version)
    that do not match the information previously obtained from sdist filename,
    user-supplied ``#egg=`` value, or an install requirement name.
    """

    def __init__(
        self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str
    ) -> None:
        self.ireq = ireq
        self.field = field
        self.f_val = f_val
        self.m_val = m_val

    def __str__(self) -> str:
        detail = (
            f"expected {self.f_val!r}, but metadata has {self.m_val!r}"
        )
        return f"Requested {self.ireq} has inconsistent {self.field}: " + detail
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
class MetadataInvalid(InstallationError):
    """The metadata produced for a requirement could not be parsed."""

    def __init__(self, ireq: "InstallRequirement", error: str) -> None:
        self.ireq = ireq
        self.error = error

    def __str__(self) -> str:
        return f"Requested {self.ireq} has invalid metadata: {self.error}"
|
| 368 |
+
|
| 369 |
+
|
| 370 |
+
class InstallationSubprocessError(DiagnosticPipError, InstallationError):
    """A subprocess call failed."""

    reference = "subprocess-exited-with-error"

    def __init__(
        self,
        *,
        command_description: str,
        exit_code: int,
        output_lines: Optional[List[str]],
    ) -> None:
        """
        :param command_description: Human-readable description of the command.
        :param exit_code: The subprocess's exit status.
        :param output_lines: Captured output lines, or None when the output
            was streamed directly to the console.
        """
        # When output was captured, replay it between bracketed markers;
        # otherwise point the user at the output already printed above.
        if output_lines is None:
            output_prompt = Text("See above for output.")
        else:
            output_prompt = (
                Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n")
                + Text("".join(output_lines))
                # Raw string: "\[" renders a literal "[" in rich markup.
                + Text.from_markup(R"[red]\[end of output][/]")
            )

        super().__init__(
            message=(
                f"[green]{escape(command_description)}[/] did not run successfully.\n"
                f"exit code: {exit_code}"
            ),
            context=output_prompt,
            hint_stmt=None,
            note_stmt=(
                "This error originates from a subprocess, and is likely not a "
                "problem with pip."
            ),
        )

        self.command_description = command_description
        self.exit_code = exit_code

    def __str__(self) -> str:
        return f"{self.command_description} exited with {self.exit_code}"
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
class MetadataGenerationFailed(InstallationSubprocessError, InstallationError):
    """Metadata generation for a package failed (a specialized subprocess error)."""

    reference = "metadata-generation-failed"

    def __init__(
        self,
        *,
        package_details: str,
    ) -> None:
        # Deliberately skip InstallationSubprocessError.__init__ (which
        # requires subprocess details we don't have here) and invoke
        # DiagnosticPipError.__init__ directly via the explicit two-argument
        # super() form.
        super(InstallationSubprocessError, self).__init__(
            message="Encountered error while generating package metadata.",
            context=escape(package_details),
            hint_stmt="See above for details.",
            note_stmt="This is an issue with the package mentioned above, not pip.",
        )

    def __str__(self) -> str:
        return "metadata generation failed"
|
| 428 |
+
|
| 429 |
+
|
| 430 |
+
class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting."""

    def __init__(self) -> None:
        self.errors: List[HashError] = []

    def append(self, error: "HashError") -> None:
        self.errors.append(error)

    def __str__(self) -> str:
        if not self.errors:
            return ""
        # Group by class so each class's heading appears once, followed by
        # every error of that class.
        self.errors.sort(key=lambda e: e.order)
        pieces: List[str] = []
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            pieces.append(cls.head)
            pieces.extend(e.body() for e in errors_of_cls)
        return "\n".join(pieces)

    def __bool__(self) -> bool:
        return bool(self.errors)
|
| 451 |
+
|
| 452 |
+
|
| 453 |
+
class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """

    req: Optional["InstallRequirement"] = None
    head = ""
    order: int = -1

    def body(self) -> str:
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            its link already populated by the resolver's _populate_link().

        """
        return f" {self._requirement_name()}"

    def __str__(self) -> str:
        return f"{self.head}\n{self.body()}"

    def _requirement_name(self) -> str:
        """Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        """
        if self.req:
            return str(self.req)
        return "unknown package"
|
| 497 |
+
|
| 498 |
+
|
| 499 |
+
class VcsHashUnsupported(HashError):
    """A hash was given for a version-control-system-based requirement, but
    there is no way to hash the contents of a VCS checkout."""

    order = 0
    head = (
        "Can't verify hashes for these requirements because we don't "
        "have a way to hash version control repositories:"
    )
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a ``file://`` requirement that points at a
    directory, but we don't have a method for hashing a directory's contents.

    Note: the docstring previously duplicated :class:`VcsHashUnsupported`'s
    ("version-control-system-based"); it now matches the ``head`` text below.
    """

    # Sorted just after VCS hash errors in combined reports.
    order = 1
    head = (
        "Can't verify hashes for these file:// requirements because they "
        "point to directories:"
    )
|
| 519 |
+
|
| 520 |
+
|
| 521 |
+
class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = (
        "Hashes are required in --require-hashes mode, but they are "
        "missing from some requirements. Here is a list of those "
        "requirements along with the hashes their downloaded archives "
        "actually had. Add lines like these to your requirements files to "
        "prevent tampering. (If you did not enable --require-hashes "
        "manually, note that it turns on automatically when any package "
        "has a hash.)"
    )

    def __init__(self, gotten_hash: str) -> None:
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self) -> str:
        # Dodge circular import.
        from pip._internal.utils.hashes import FAVORITE_HASH

        package = None
        if self.req:
            # In the case of URL-based requirements, display the original URL
            # seen in the requirements file rather than the package name,
            # so the output can be directly copied into the requirements file.
            package = (
                self.req.original_link
                if self.req.is_direct
                # In case someone feeds something downright stupid
                # to InstallRequirement's constructor.
                else getattr(self.req, "req", None)
            )
        # Emit a ready-to-paste "--hash=<algo>:<digest>" requirements line.
        return " {} --hash={}:{}".format(
            package or "unknown package", FAVORITE_HASH, self.gotten_hash
        )
|
| 561 |
+
|
| 562 |
+
|
| 563 |
+
class HashUnpinned(HashError):
    """A requirement carried a hash but was not pinned to an exact version."""

    order = 3
    head = (
        "In --require-hashes mode, all requirements must have their "
        "versions pinned with ==. These do not:"
    )
|
| 572 |
+
|
| 573 |
+
|
| 574 |
+
class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raise to
        improve its error message.

    """

    order = 4
    head = (
        "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
        "FILE. If you have updated the package versions, please update "
        "the hashes. Otherwise, examine the package contents carefully; "
        "someone may have tampered with them."
    )

    def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self) -> str:
        return f" {self._requirement_name()}:\n{self._hash_comparison()}"

    def _hash_comparison(self) -> str:
        """
        Return a comparison of actual and expected hash values.

        Example::

            Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
            or 123451234512345123451234512345123451234512345
            Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """

        def hash_then_or(hash_name: str) -> "chain[str]":
            # Labels the first expected digest with the algorithm name and
            # each subsequent alternative with "or".
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat(" or"))

        lines: List[str] = []
        for hash_name, expecteds in self.allowed.items():
            prefix = hash_then_or(hash_name)
            lines.extend((f" Expected {next(prefix)} {e}") for e in expecteds)
            lines.append(
                f" Got {self.gots[hash_name].hexdigest()}\n"
            )
        return "\n".join(lines)
|
| 630 |
+
|
| 631 |
+
|
| 632 |
+
class UnsupportedPythonVersion(InstallationError):
    """The running Python does not satisfy a package's Requires-Python metadata."""
|
| 635 |
+
|
| 636 |
+
|
| 637 |
+
class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
    """A configuration file exists but could not be loaded."""

    def __init__(
        self,
        reason: str = "could not be loaded",
        fname: Optional[str] = None,
        error: Optional[configparser.Error] = None,
    ) -> None:
        super().__init__(error)
        self.reason = reason
        self.fname = fname
        self.error = error

    def __str__(self) -> str:
        # With a filename, point at the file; otherwise show the parser error.
        if self.fname is None:
            assert self.error is not None
            tail = f".\n{self.error}\n"
        else:
            tail = f" in {self.fname}."
        return f"Configuration file {self.reason}{tail}"
|
| 658 |
+
|
| 659 |
+
|
| 660 |
+
_DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\
|
| 661 |
+
The Python environment under {sys.prefix} is managed externally, and may not be
|
| 662 |
+
manipulated by the user. Please use specific tooling from the distributor of
|
| 663 |
+
the Python installation to interact with this environment instead.
|
| 664 |
+
"""
|
| 665 |
+
|
| 666 |
+
|
| 667 |
+
class ExternallyManagedEnvironment(DiagnosticPipError):
    """The current environment is externally managed.

    This is raised when the current environment is externally managed, as
    defined by `PEP 668`_. The ``EXTERNALLY-MANAGED`` configuration is checked
    and displayed when the error is bubbled up to the user.

    :param error: The error message read from ``EXTERNALLY-MANAGED``.
    """

    reference = "externally-managed-environment"

    def __init__(self, error: Optional[str]) -> None:
        # Fall back to the generic module-level message when the marker file
        # supplied no vendor-specific text.
        if error is None:
            context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR)
        else:
            context = Text(error)
        super().__init__(
            message="This environment is externally managed",
            context=context,
            note_stmt=(
                "If you believe this is a mistake, please contact your "
                "Python installation or OS distribution provider. "
                "You can override this, at the risk of breaking your Python "
                "installation or OS, by passing --break-system-packages."
            ),
            hint_stmt=Text("See PEP 668 for the detailed specification."),
        )

    @staticmethod
    def _iter_externally_managed_error_keys() -> Iterator[str]:
        # Yields candidate config keys most-specific-first:
        # "Error-<full locale>", "Error-<language>", then plain "Error".
        # LC_MESSAGES is in POSIX, but not the C standard. The most common
        # platform that does not implement this category is Windows, where
        # using other categories for console message localization is equally
        # unreliable, so we fall back to the locale-less vendor message. This
        # can always be re-evaluated when a vendor proposes a new alternative.
        try:
            category = locale.LC_MESSAGES
        except AttributeError:
            lang: Optional[str] = None
        else:
            lang, _ = locale.getlocale(category)
        if lang is not None:
            yield f"Error-{lang}"
            # Strip the territory/encoding suffix ("en-US" / "en_US" -> "en").
            for sep in ("-", "_"):
                before, found, _ = lang.partition(sep)
                if not found:
                    continue
                yield f"Error-{before}"
        yield "Error"

    @classmethod
    def from_config(
        cls,
        config: Union[pathlib.Path, str],
    ) -> "ExternallyManagedEnvironment":
        """Build the error from an ``EXTERNALLY-MANAGED`` marker file.

        Unreadable or malformed files are logged and degrade to ``cls(None)``
        (the default message) instead of raising.
        """
        parser = configparser.ConfigParser(interpolation=None)
        try:
            parser.read(config, encoding="utf-8")
            section = parser["externally-managed"]
            for key in cls._iter_externally_managed_error_keys():
                with contextlib.suppress(KeyError):
                    return cls(section[key])
        except KeyError:
            pass
        except (OSError, UnicodeDecodeError, configparser.ParsingError):
            # Imported lazily to avoid a circular dependency.
            from pip._internal.utils._log import VERBOSE

            exc_info = logger.isEnabledFor(VERBOSE)
            logger.warning("Failed to read %s", config, exc_info=exc_info)
        return cls(None)
|
| 738 |
+
|
| 739 |
+
|
| 740 |
+
class UninstallMissingRecord(DiagnosticPipError):
    """Uninstall failed because the distribution carries no RECORD file."""

    reference = "uninstall-no-record-file"

    def __init__(self, *, distribution: "BaseDistribution") -> None:
        installer = distribution.installer
        # Only suggest a pip-based recovery when pip (or an unknown tool)
        # installed the package; otherwise defer to the recorded installer.
        if not installer or installer == "pip":
            dep = f"{distribution.raw_name}=={distribution.version}"
            hint = Text.assemble(
                "You might be able to recover from this via: ",
                (f"pip install --force-reinstall --no-deps {dep}", "green"),
            )
        else:
            hint = Text(
                f"The package was installed by {installer}. "
                "You should check if it can uninstall the package."
            )

        super().__init__(
            message=Text(f"Cannot uninstall {distribution}"),
            context=(
                "The package's contents are unknown: "
                f"no RECORD file was found for {distribution.raw_name}."
            ),
            hint_stmt=hint,
        )
|
| 765 |
+
|
| 766 |
+
|
| 767 |
+
class LegacyDistutilsInstall(DiagnosticPipError):
    """Uninstall refused because the package was installed by distutils.

    Distutils installs record no file manifest, so removal would be partial.
    """

    reference = "uninstall-distutils-installed-package"

    def __init__(self, *, distribution: "BaseDistribution") -> None:
        super().__init__(
            message=Text(f"Cannot uninstall {distribution}"),
            context=(
                "It is a distutils installed project and thus we cannot accurately "
                "determine which files belong to it which would lead to only a partial "
                "uninstall."
            ),
            hint_stmt=None,
        )
|
| 780 |
+
|
| 781 |
+
|
| 782 |
+
class InvalidInstalledPackage(DiagnosticPipError):
    """An already-installed package has metadata pip can no longer parse."""

    reference = "invalid-installed-package"

    def __init__(
        self,
        *,
        dist: "BaseDistribution",
        invalid_exc: Union[InvalidRequirement, InvalidVersion],
    ) -> None:
        installed_location = dist.installed_location

        # Name what failed to parse: a dependency specifier ("requirement")
        # or the package's own version string ("version").
        if isinstance(invalid_exc, InvalidRequirement):
            invalid_type = "requirement"
        else:
            invalid_type = "version"

        super().__init__(
            message=Text(
                f"Cannot process installed package {dist} "
                + (f"in {installed_location!r} " if installed_location else "")
                + f"because it has an invalid {invalid_type}:\n{invalid_exc.args[0]}"
            ),
            context=(
                "Starting with pip 24.1, packages with invalid "
                f"{invalid_type}s can not be processed."
            ),
            hint_stmt="To proceed this package must be uninstalled.",
        )
|
vllm/lib/python3.10/site-packages/pip/_internal/main.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List, Optional
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def main(args: Optional[List[str]] = None) -> int:
    """This is preserved for old console scripts that may still be referencing
    it.

    For additional details, see https://github.com/pypa/pip/issues/7498.

    :param args: Command-line arguments; None means use ``sys.argv``.
    :return: The process exit code.
    """
    # Imported lazily so importing this module stays cheap and side-effect free.
    from pip._internal.utils.entrypoints import _wrapper

    return _wrapper(args)
|
vllm/lib/python3.10/site-packages/pip/_internal/pyproject.py
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib.util
|
| 2 |
+
import os
|
| 3 |
+
import sys
|
| 4 |
+
from collections import namedtuple
|
| 5 |
+
from typing import Any, List, Optional
|
| 6 |
+
|
| 7 |
+
if sys.version_info >= (3, 11):
|
| 8 |
+
import tomllib
|
| 9 |
+
else:
|
| 10 |
+
from pip._vendor import tomli as tomllib
|
| 11 |
+
|
| 12 |
+
from pip._vendor.packaging.requirements import InvalidRequirement
|
| 13 |
+
|
| 14 |
+
from pip._internal.exceptions import (
|
| 15 |
+
InstallationError,
|
| 16 |
+
InvalidPyProjectBuildRequires,
|
| 17 |
+
MissingPyProjectBuildRequires,
|
| 18 |
+
)
|
| 19 |
+
from pip._internal.utils.packaging import get_requirement
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def _is_list_of_str(obj: Any) -> bool:
|
| 23 |
+
return isinstance(obj, list) and all(isinstance(item, str) for item in obj)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def make_pyproject_path(unpacked_source_directory: str) -> str:
    """Return the path of ``pyproject.toml`` inside an unpacked source tree."""
    filename = "pyproject.toml"
    return os.path.join(unpacked_source_directory, filename)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
# Result of load_pyproject_toml() on the PEP 517 code path:
#   requires     - build requirements from pyproject.toml (PEP 518)
#   backend      - name of the PEP 517 build backend
#   check        - requirements to verify are installed after the build
#                  environment has been set up
#   backend_path - directories to import the backend from (backend-path),
#                  relative to the project root
BuildSystemDetails = namedtuple(
    "BuildSystemDetails", ["requires", "backend", "check", "backend_path"]
)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def load_pyproject_toml(
    use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str
) -> Optional[BuildSystemDetails]:
    """Load the pyproject.toml file.

    Parameters:
        use_pep517 - Has the user requested PEP 517 processing? None
            means the user hasn't explicitly specified.
        pyproject_toml - Location of the project's pyproject.toml file
        setup_py - Location of the project's setup.py file
        req_name - The name of the requirement we're processing (for
            error reporting)

    Returns:
        None if we should use the legacy code path, otherwise a tuple
        (
            requirements from pyproject.toml,
            name of PEP 517 backend,
            requirements we should check are installed after setting
                up the build environment
            directory paths to import the backend from (backend-path),
                relative to the project root.
        )

    Raises:
        InstallationError - if neither setup.py nor pyproject.toml exists,
            or PEP 517 was explicitly disabled where it is mandatory.
        MissingPyProjectBuildRequires - build-system table lacks "requires".
        InvalidPyProjectBuildRequires - "requires" is malformed.
    """
    has_pyproject = os.path.isfile(pyproject_toml)
    has_setup = os.path.isfile(setup_py)

    if not has_pyproject and not has_setup:
        raise InstallationError(
            f"{req_name} does not appear to be a Python project: "
            f"neither 'setup.py' nor 'pyproject.toml' found."
        )

    if has_pyproject:
        with open(pyproject_toml, encoding="utf-8") as f:
            pp_toml = tomllib.loads(f.read())
        build_system = pp_toml.get("build-system")
    else:
        build_system = None

    # The following cases must use PEP 517
    # We check for use_pep517 being non-None and falsy because that means
    # the user explicitly requested --no-use-pep517. The value 0 as
    # opposed to False can occur when the value is provided via an
    # environment variable or config file option (due to the quirk of
    # strtobool() returning an integer in pip's configuration code).
    if has_pyproject and not has_setup:
        if use_pep517 is not None and not use_pep517:
            raise InstallationError(
                "Disabling PEP 517 processing is invalid: "
                "project does not have a setup.py"
            )
        use_pep517 = True
    elif build_system and "build-backend" in build_system:
        if use_pep517 is not None and not use_pep517:
            raise InstallationError(
                "Disabling PEP 517 processing is invalid: "
                "project specifies a build backend of {} "
                "in pyproject.toml".format(build_system["build-backend"])
            )
        use_pep517 = True

    # If we haven't worked out whether to use PEP 517 yet,
    # and the user hasn't explicitly stated a preference,
    # we do so if the project has a pyproject.toml file
    # or if we cannot import setuptools or wheels.

    # We fallback to PEP 517 when without setuptools or without the wheel package,
    # so setuptools can be installed as a default build backend.
    # For more info see:
    # https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9
    # https://github.com/pypa/pip/issues/8559
    elif use_pep517 is None:
        use_pep517 = (
            has_pyproject
            or not importlib.util.find_spec("setuptools")
            or not importlib.util.find_spec("wheel")
        )

    # At this point, we know whether we're going to use PEP 517.
    assert use_pep517 is not None

    # If we're using the legacy code path, there is nothing further
    # for us to do here.
    if not use_pep517:
        return None

    if build_system is None:
        # Either the user has a pyproject.toml with no build-system
        # section, or the user has no pyproject.toml, but has opted in
        # explicitly via --use-pep517.
        # In the absence of any explicit backend specification, we
        # assume the setuptools backend that most closely emulates the
        # traditional direct setup.py execution, and require wheel and
        # a version of setuptools that supports that backend.

        build_system = {
            "requires": ["setuptools>=40.8.0"],
            "build-backend": "setuptools.build_meta:__legacy__",
        }

    # If we're using PEP 517, we have build system information (either
    # from pyproject.toml, or defaulted by the code above).
    # Note that at this point, we do not know if the user has actually
    # specified a backend, though.
    assert build_system is not None

    # Ensure that the build-system section in pyproject.toml conforms
    # to PEP 518.

    # Specifying the build-system table but not the requires key is invalid
    if "requires" not in build_system:
        raise MissingPyProjectBuildRequires(package=req_name)

    # Error out if requires is not a list of strings
    requires = build_system["requires"]
    if not _is_list_of_str(requires):
        raise InvalidPyProjectBuildRequires(
            package=req_name,
            reason="It is not a list of strings.",
        )

    # Each requirement must be valid as per PEP 508
    for requirement in requires:
        try:
            get_requirement(requirement)
        except InvalidRequirement as error:
            raise InvalidPyProjectBuildRequires(
                package=req_name,
                reason=f"It contains an invalid requirement: {requirement!r}",
            ) from error

    backend = build_system.get("build-backend")
    backend_path = build_system.get("backend-path", [])
    check: List[str] = []
    if backend is None:
        # If the user didn't specify a backend, we assume they want to use
        # the setuptools backend. But we can't be sure they have included
        # a version of setuptools which supplies the backend. So we
        # make a note to check that this requirement is present once
        # we have set up the environment.
        # This is quite a lot of work to check for a very specific case. But
        # the problem is, that case is potentially quite common - projects that
        # adopted PEP 518 early for the ability to specify requirements to
        # execute setup.py, but never considered needing to mention the build
        # tools themselves. The original PEP 518 code had a similar check (but
        # implemented in a different way).
        backend = "setuptools.build_meta:__legacy__"
        check = ["setuptools>=40.8.0"]

    return BuildSystemDetails(requires, backend, check, backend_path)
|
vllm/lib/python3.10/site-packages/pip/_internal/req/__init__.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import logging
|
| 3 |
+
from dataclasses import dataclass
|
| 4 |
+
from typing import Generator, List, Optional, Sequence, Tuple
|
| 5 |
+
|
| 6 |
+
from pip._internal.utils.logging import indent_log
|
| 7 |
+
|
| 8 |
+
from .req_file import parse_requirements
|
| 9 |
+
from .req_install import InstallRequirement
|
| 10 |
+
from .req_set import RequirementSet
|
| 11 |
+
|
| 12 |
+
__all__ = [
|
| 13 |
+
"RequirementSet",
|
| 14 |
+
"InstallRequirement",
|
| 15 |
+
"parse_requirements",
|
| 16 |
+
"install_given_reqs",
|
| 17 |
+
]
|
| 18 |
+
|
| 19 |
+
logger = logging.getLogger(__name__)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@dataclass(frozen=True)
class InstallationResult:
    """Record of a single successfully installed requirement."""

    # Project name of the installed requirement.
    name: str
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def _validate_requirements(
    requirements: List[InstallRequirement],
) -> Generator[Tuple[str, InstallRequirement], None, None]:
    """Yield ``(name, requirement)`` pairs, asserting each requirement is named.

    By the time requirements reach installation they must all have been
    resolved to a concrete name; an unnamed one indicates a bug upstream.
    """
    for req in requirements:
        assert req.name, f"invalid to-be-installed requirement: {req}"
        yield req.name, req
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def install_given_reqs(
    requirements: List[InstallRequirement],
    global_options: Sequence[str],
    root: Optional[str],
    home: Optional[str],
    prefix: Optional[str],
    warn_script_location: bool,
    use_user_site: bool,
    pycompile: bool,
) -> List[InstallationResult]:
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)

    :param requirements: resolved requirements, in installation order
    :param root: alternative root directory, if any
    :param home: alternative "home" scheme directory, if any
    :param prefix: alternative installation prefix, if any
    :return: one InstallationResult per successfully installed requirement
    :raises Exception: re-raises whatever ``requirement.install`` raised,
        after rolling back any uninstall performed for that requirement
    """
    # Keyed by requirement name; preserves the caller-provided order.
    to_install = collections.OrderedDict(_validate_requirements(requirements))

    if to_install:
        logger.info(
            "Installing collected packages: %s",
            ", ".join(to_install.keys()),
        )

    installed = []

    with indent_log():
        for req_name, requirement in to_install.items():
            if requirement.should_reinstall:
                # An existing installation must be removed first; the
                # uninstall is kept pending (rollback-able) until the new
                # install succeeds.
                logger.info("Attempting uninstall: %s", req_name)
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(auto_confirm=True)
            else:
                uninstalled_pathset = None

            try:
                requirement.install(
                    global_options,
                    root=root,
                    home=home,
                    prefix=prefix,
                    warn_script_location=warn_script_location,
                    use_user_site=use_user_site,
                    pycompile=pycompile,
                )
            except Exception:
                # if install did not succeed, rollback previous uninstall
                if uninstalled_pathset and not requirement.install_succeeded:
                    uninstalled_pathset.rollback()
                raise
            else:
                if uninstalled_pathset and requirement.install_succeeded:
                    # New version is in place: make the uninstall permanent.
                    uninstalled_pathset.commit()

            installed.append(InstallationResult(req_name))

    return installed
|
vllm/lib/python3.10/site-packages/pip/_internal/req/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (2.31 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pip/_internal/req/__pycache__/constructors.cpython-310.pyc
ADDED
|
Binary file (13.9 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_file.cpython-310.pyc
ADDED
|
Binary file (15.4 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_install.cpython-310.pyc
ADDED
|
Binary file (24.8 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_set.cpython-310.pyc
ADDED
|
Binary file (3.89 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-310.pyc
ADDED
|
Binary file (18.7 kB). View file
|
|
|
vllm/lib/python3.10/site-packages/pip/_internal/req/constructors.py
ADDED
|
@@ -0,0 +1,560 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Backing implementation for InstallRequirement's various constructors
|
| 2 |
+
|
| 3 |
+
The idea here is that these formed a major chunk of InstallRequirement's size
|
| 4 |
+
so, moving them and support code dedicated to them outside of that class
|
| 5 |
+
helps creates for better understandability for the rest of the code.
|
| 6 |
+
|
| 7 |
+
These are meant to be used elsewhere within pip to create instances of
|
| 8 |
+
InstallRequirement.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import copy
|
| 12 |
+
import logging
|
| 13 |
+
import os
|
| 14 |
+
import re
|
| 15 |
+
from dataclasses import dataclass
|
| 16 |
+
from typing import Collection, Dict, List, Optional, Set, Tuple, Union
|
| 17 |
+
|
| 18 |
+
from pip._vendor.packaging.markers import Marker
|
| 19 |
+
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
|
| 20 |
+
from pip._vendor.packaging.specifiers import Specifier
|
| 21 |
+
|
| 22 |
+
from pip._internal.exceptions import InstallationError
|
| 23 |
+
from pip._internal.models.index import PyPI, TestPyPI
|
| 24 |
+
from pip._internal.models.link import Link
|
| 25 |
+
from pip._internal.models.wheel import Wheel
|
| 26 |
+
from pip._internal.req.req_file import ParsedRequirement
|
| 27 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 28 |
+
from pip._internal.utils.filetypes import is_archive_file
|
| 29 |
+
from pip._internal.utils.misc import is_installable_dir
|
| 30 |
+
from pip._internal.utils.packaging import get_requirement
|
| 31 |
+
from pip._internal.utils.urls import path_to_url
|
| 32 |
+
from pip._internal.vcs import is_url, vcs
|
| 33 |
+
|
| 34 |
+
__all__ = [
|
| 35 |
+
"install_req_from_editable",
|
| 36 |
+
"install_req_from_line",
|
| 37 |
+
"parse_editable",
|
| 38 |
+
]
|
| 39 |
+
|
| 40 |
+
logger = logging.getLogger(__name__)
|
| 41 |
+
operators = Specifier._operators.keys()
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
|
| 45 |
+
m = re.match(r"^(.+)(\[[^\]]+\])$", path)
|
| 46 |
+
extras = None
|
| 47 |
+
if m:
|
| 48 |
+
path_no_extras = m.group(1)
|
| 49 |
+
extras = m.group(2)
|
| 50 |
+
else:
|
| 51 |
+
path_no_extras = path
|
| 52 |
+
|
| 53 |
+
return path_no_extras, extras
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def convert_extras(extras: Optional[str]) -> Set[str]:
    """Convert a bracketed extras string (e.g. ``"[a,b]"``) into a set of names.

    ``None`` or an empty string yields an empty set.
    """
    if not extras:
        return set()
    # Parse via a dummy requirement so the PEP 508 extras grammar applies.
    return get_requirement("placeholder" + extras.lower()).extras
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requirement:
    """
    Returns a new requirement based on the given one, with the supplied extras. If the
    given requirement already has extras those are replaced (or dropped if no new extras
    are given).
    """
    # Split the stringified requirement into name / [extras] / remainder.
    match: Optional[re.Match[str]] = re.fullmatch(
        # see https://peps.python.org/pep-0508/#complete-grammar
        r"([\w\t .-]+)(\[[^\]]*\])?(.*)",
        str(req),
        flags=re.ASCII,
    )
    # ireq.req is a valid requirement so the regex should always match
    assert (
        match is not None
    ), f"regex match on requirement {req} failed, this should never happen"
    pre: Optional[str] = match.group(1)
    post: Optional[str] = match.group(3)
    assert (
        pre is not None and post is not None
    ), f"regex group selection for requirement {req} failed, this should never happen"
    # Sorted for a deterministic string form; empty brackets drop the extras.
    extras: str = "[{}]".format(",".join(sorted(new_extras)) if new_extras else "")
    return get_requirement(f"{pre}{extras}{post}")
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
    """Parses an editable requirement into:
        - a requirement name
        - an URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]

    :raises InstallationError: if the requirement is neither a local
        project path nor a recognized VCS URL, or a VCS URL carries no
        ``#egg=`` name fragment.
    """

    url = editable_req

    # If a file path is specified with extras, strip off the extras.
    url_no_extras, extras = _strip_extras(url)

    if os.path.isdir(url_no_extras):
        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)

    if url_no_extras.lower().startswith("file:"):
        package_name = Link(url_no_extras).egg_fragment
        if extras:
            # Parse extras via a dummy requirement so PEP 508 rules apply.
            return (
                package_name,
                url_no_extras,
                get_requirement("placeholder" + extras.lower()).extras,
            )
        else:
            return package_name, url_no_extras, set()

    # Prefix a bare VCS scheme (e.g. "git:...") with "<vcs>+" so the Link
    # below is recognized as a VCS link.
    for version_control in vcs:
        if url.lower().startswith(f"{version_control}:"):
            url = f"{version_control}+{url}"
            break

    link = Link(url)

    if not link.is_vcs:
        backends = ", ".join(vcs.all_schemes)
        raise InstallationError(
            f"{editable_req} is not a valid editable requirement. "
            f"It should either be a path to a local project or a VCS URL "
            f"(beginning with {backends})."
        )

    package_name = link.egg_fragment
    if not package_name:
        raise InstallationError(
            f"Could not detect requirement name for '{editable_req}', "
            "please specify one with #egg=your_package_name"
        )
    return package_name, url, set()
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def check_first_requirement_in_file(filename: str) -> None:
    """Check if file is parsable as a requirements file.

    This is heavily based on ``pkg_resources.parse_requirements``, but
    simplified to just check the first meaningful line.

    :raises InvalidRequirement: If the first meaningful line cannot be parsed
        as an requirement.
    """
    with open(filename, encoding="utf-8", errors="ignore") as f:
        # Create a steppable iterator, so we can handle \-continuations.
        lines = (
            line
            for line in (line.strip() for line in f)
            if line and not line.startswith("#")  # Skip blank lines/comments.
        )

        for line in lines:
            # Drop comments -- a hash without a space may be in a URL.
            if " #" in line:
                line = line[: line.find(" #")]
            # If there is a line continuation, drop it, and append the next line.
            # (next() advances the shared generator, consuming that line.)
            if line.endswith("\\"):
                line = line[:-2].strip() + next(lines, "")
            get_requirement(line)
            # Only the first meaningful line is validated.
            return
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
def deduce_helpful_msg(req: str) -> str:
    """Returns helpful msg in case requirements file does not exist,
    or cannot be parsed.

    :params req: Requirements file path
    :return: a human-readable hint suitable for appending to an error message
    """
    if not os.path.exists(req):
        return f" File '{req}' does not exist."
    msg = " The path does exist. "
    # Try to parse and check if it is a requirements file.
    try:
        check_first_requirement_in_file(req)
    except InvalidRequirement:
        # Not a requirements file; no extra hint to give.
        logger.debug("Cannot parse '%s' as requirements file", req)
    else:
        msg += (
            f"The argument you provided "
            f"({req}) appears to be a"
            f" requirements file. If that is the"
            f" case, use the '-r' flag to install"
            f" the packages specified within it."
        )
    return msg
|
| 193 |
+
|
| 194 |
+
|
| 195 |
+
@dataclass(frozen=True)
class RequirementParts:
    """Pieces parsed out of a requirement line, prior to constructing an
    InstallRequirement."""

    # PEP 508 requirement, when a name/specifier was present.
    requirement: Optional[Requirement]
    # Direct link (URL, local path, wheel), if any.
    link: Optional[Link]
    # Environment markers following ";", if any.
    markers: Optional[Marker]
    # Requested extras (possibly empty).
    extras: Set[str]
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
def parse_req_from_editable(editable_req: str) -> RequirementParts:
    """Parse an editable requirement string into RequirementParts.

    :raises InstallationError: if the detected name is not a valid
        PEP 508 requirement.
    """
    name, url, extras_override = parse_editable(editable_req)

    if name is not None:
        try:
            req: Optional[Requirement] = get_requirement(name)
        except InvalidRequirement as exc:
            raise InstallationError(f"Invalid requirement: {name!r}: {exc}")
    else:
        req = None

    link = Link(url)

    # Editable requirements never carry markers here.
    return RequirementParts(req, link, None, extras_override)
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
# ---- The actual constructors follow ----
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
def install_req_from_editable(
    editable_req: str,
    comes_from: Optional[Union[InstallRequirement, str]] = None,
    *,
    use_pep517: Optional[bool] = None,
    isolated: bool = False,
    global_options: Optional[List[str]] = None,
    hash_options: Optional[Dict[str, List[str]]] = None,
    constraint: bool = False,
    user_supplied: bool = False,
    permit_editable_wheels: bool = False,
    config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
    """Create an editable InstallRequirement from an editable requirement
    string (a local project path or a VCS URL with an #egg fragment).

    :param editable_req: the editable requirement as given by the user
    :param comes_from: what this requirement originated from, for reporting
    """
    parts = parse_req_from_editable(editable_req)

    return InstallRequirement(
        parts.requirement,
        comes_from=comes_from,
        user_supplied=user_supplied,
        editable=True,
        permit_editable_wheels=permit_editable_wheels,
        link=parts.link,
        constraint=constraint,
        use_pep517=use_pep517,
        isolated=isolated,
        global_options=global_options,
        hash_options=hash_options,
        config_settings=config_settings,
        extras=parts.extras,
    )
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
def _looks_like_path(name: str) -> bool:
|
| 255 |
+
"""Checks whether the string "looks like" a path on the filesystem.
|
| 256 |
+
|
| 257 |
+
This does not check whether the target actually exists, only judge from the
|
| 258 |
+
appearance.
|
| 259 |
+
|
| 260 |
+
Returns true if any of the following conditions is true:
|
| 261 |
+
* a path separator is found (either os.path.sep or os.path.altsep);
|
| 262 |
+
* a dot is found (which represents the current directory).
|
| 263 |
+
"""
|
| 264 |
+
if os.path.sep in name:
|
| 265 |
+
return True
|
| 266 |
+
if os.path.altsep is not None and os.path.altsep in name:
|
| 267 |
+
return True
|
| 268 |
+
if name.startswith("."):
|
| 269 |
+
return True
|
| 270 |
+
return False
|
| 271 |
+
|
| 272 |
+
|
| 273 |
+
def _get_url_from_path(path: str, name: str) -> Optional[str]:
    """
    First, it checks whether a provided path is an installable directory. If it
    is, returns the path.

    If false, check if the path is an archive file (such as a .whl).
    The function checks if the path is a file. If false, if the path has
    an @, it will treat it as a PEP 440 URL requirement and return the path.

    :param path: absolute, normalized candidate path
    :param name: the requirement string as originally given by the user
    :return: a file:// URL for the path, or None if *name* should instead be
        treated as a plain requirement specifier
    :raises InstallationError: if the directory exists but is not installable
    """
    if _looks_like_path(name) and os.path.isdir(path):
        if is_installable_dir(path):
            return path_to_url(path)
        # TODO: The is_installable_dir test here might not be necessary
        #       now that it is done in load_pyproject_toml too.
        raise InstallationError(
            f"Directory {name!r} is not installable. Neither 'setup.py' "
            "nor 'pyproject.toml' found."
        )
    if not is_archive_file(path):
        return None
    if os.path.isfile(path):
        return path_to_url(path)
    urlreq_parts = name.split("@", 1)
    if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
        # If the path contains '@' and the part before it does not look
        # like a path, try to treat it as a PEP 440 URL req instead.
        return None
    # Archive-looking name with no matching file: warn, but still treat it
    # as a path so the user sees a concrete "does not exist" failure later.
    logger.warning(
        "Requirement %r looks like a filename, but the file does not exist",
        name,
    )
    return path_to_url(path)
|
| 305 |
+
|
| 306 |
+
|
| 307 |
+
def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts:
    """Parse one requirement line into RequirementParts.

    *name* may be a PEP 508 specifier, a URL, a local directory/archive path,
    or a wheel filename. *line_source* describes where the line came from and
    is used only to enrich error messages.

    :raises InstallationError: when the requirement string is invalid.
    """
    # URLs may legitimately contain ";" (e.g. in parameters), so markers on a
    # URL line must be separated by "; " (with a space).
    if is_url(name):
        marker_sep = "; "
    else:
        marker_sep = ";"
    if marker_sep in name:
        name, markers_as_string = name.split(marker_sep, 1)
        markers_as_string = markers_as_string.strip()
        if not markers_as_string:
            markers = None
        else:
            markers = Marker(markers_as_string)
    else:
        markers = None
    name = name.strip()
    req_as_string = None
    path = os.path.normpath(os.path.abspath(name))
    link = None
    extras_as_string = None

    if is_url(name):
        link = Link(name)
    else:
        p, extras_as_string = _strip_extras(path)
        url = _get_url_from_path(p, name)
        if url is not None:
            link = Link(url)

    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == "file" and re.search(r"\.\./", link.url):
            link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            req_as_string = f"{wheel.name}=={wheel.version}"
        else:
            # set the req to the egg fragment. when it's not there, this
            # will become an 'unnamed' requirement
            req_as_string = link.egg_fragment

    # a requirement specifier
    else:
        req_as_string = name

    extras = convert_extras(extras_as_string)

    def with_source(text: str) -> str:
        # Append "(from <line_source>)" to error text when available.
        if not line_source:
            return text
        return f"{text} (from {line_source})"

    def _parse_req_string(req_as_string: str) -> Requirement:
        # Parse the specifier, producing a targeted hint on failure.
        try:
            return get_requirement(req_as_string)
        except InvalidRequirement as exc:
            if os.path.sep in req_as_string:
                add_msg = "It looks like a path."
                add_msg += deduce_helpful_msg(req_as_string)
            elif "=" in req_as_string and not any(
                op in req_as_string for op in operators
            ):
                add_msg = "= is not a valid operator. Did you mean == ?"
            else:
                add_msg = ""
            msg = with_source(f"Invalid requirement: {req_as_string!r}: {exc}")
            if add_msg:
                msg += f"\nHint: {add_msg}"
            raise InstallationError(msg)

    if req_as_string is not None:
        req: Optional[Requirement] = _parse_req_string(req_as_string)
    else:
        req = None

    return RequirementParts(req, link, markers, extras)
|
| 384 |
+
|
| 385 |
+
|
| 386 |
+
def install_req_from_line(
    name: str,
    comes_from: Optional[Union[str, InstallRequirement]] = None,
    *,
    use_pep517: Optional[bool] = None,
    isolated: bool = False,
    global_options: Optional[List[str]] = None,
    hash_options: Optional[Dict[str, List[str]]] = None,
    constraint: bool = False,
    line_source: Optional[str] = None,
    user_supplied: bool = False,
    config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.

    :param name: The requirement text to parse (specifier, path, or URL).
    :param comes_from: What pulled this requirement in (a parent requirement
        or a human-readable origin string), recorded on the result.
    :param line_source: An optional string describing where the line is from,
        for logging purposes in case of an error.
    """
    # All string/URL interpretation happens in parse_req_from_line; this
    # function only assembles the InstallRequirement from the parsed parts
    # plus the caller-supplied options.
    parts = parse_req_from_line(name, line_source)

    return InstallRequirement(
        parts.requirement,
        comes_from,
        link=parts.link,
        markers=parts.markers,
        use_pep517=use_pep517,
        isolated=isolated,
        global_options=global_options,
        hash_options=hash_options,
        config_settings=config_settings,
        constraint=constraint,
        extras=parts.extras,
        user_supplied=user_supplied,
    )
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
def install_req_from_req_string(
    req_string: str,
    comes_from: Optional[InstallRequirement] = None,
    isolated: bool = False,
    use_pep517: Optional[bool] = None,
    user_supplied: bool = False,
) -> InstallRequirement:
    """Create an InstallRequirement from a PEP 508 requirement string.

    :param req_string: The requirement specifier, e.g. ``"pkg>=1.0"``.
    :param comes_from: The requirement that pulled this one in, if any.
    :raises InstallationError: If the string is not a valid requirement, or
        if a PyPI-hosted package tries to depend on a direct (non-PyPI) URL.
    """
    try:
        req = get_requirement(req_string)
    except InvalidRequirement as exc:
        # Chain the original parse error so tracebacks show the root cause.
        raise InstallationError(f"Invalid requirement: {req_string!r}: {exc}") from exc

    domains_not_allowed = [
        PyPI.file_storage_domain,
        TestPyPI.file_storage_domain,
    ]
    if (
        req.url
        and comes_from
        and comes_from.link
        and comes_from.link.netloc in domains_not_allowed
    ):
        # Explicitly disallow pypi packages that depend on external urls
        raise InstallationError(
            "Packages installed from PyPI cannot depend on packages "
            "which are not also hosted on PyPI.\n"
            f"{comes_from.name} depends on {req} "
        )

    return InstallRequirement(
        req,
        comes_from,
        isolated=isolated,
        use_pep517=use_pep517,
        user_supplied=user_supplied,
    )
|
| 459 |
+
|
| 460 |
+
|
| 461 |
+
def install_req_from_parsed_requirement(
    parsed_req: ParsedRequirement,
    isolated: bool = False,
    use_pep517: Optional[bool] = None,
    user_supplied: bool = False,
    config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
) -> InstallRequirement:
    """Convert a ParsedRequirement (one requirements-file line) into an
    InstallRequirement, dispatching on whether the line was editable (-e).
    """
    if parsed_req.is_editable:
        # Editable lines do not carry per-requirement global/hash options.
        req = install_req_from_editable(
            parsed_req.requirement,
            comes_from=parsed_req.comes_from,
            use_pep517=use_pep517,
            constraint=parsed_req.constraint,
            isolated=isolated,
            user_supplied=user_supplied,
            config_settings=config_settings,
        )

    else:
        # parsed_req.options may be None, hence the defensive defaults.
        req = install_req_from_line(
            parsed_req.requirement,
            comes_from=parsed_req.comes_from,
            use_pep517=use_pep517,
            isolated=isolated,
            global_options=(
                parsed_req.options.get("global_options", [])
                if parsed_req.options
                else []
            ),
            hash_options=(
                parsed_req.options.get("hashes", {}) if parsed_req.options else {}
            ),
            constraint=parsed_req.constraint,
            line_source=parsed_req.line_source,
            user_supplied=user_supplied,
            config_settings=config_settings,
        )
    return req
|
| 499 |
+
|
| 500 |
+
|
| 501 |
+
def install_req_from_link_and_ireq(
    link: Link, ireq: InstallRequirement
) -> InstallRequirement:
    """Clone *ireq* into a new InstallRequirement pinned to *link*.

    Every field except the link is copied from the template requirement.
    """
    template_kwargs = {
        "req": ireq.req,
        "comes_from": ireq.comes_from,
        "editable": ireq.editable,
        "markers": ireq.markers,
        "use_pep517": ireq.use_pep517,
        "isolated": ireq.isolated,
        "global_options": ireq.global_options,
        "hash_options": ireq.hash_options,
        "config_settings": ireq.config_settings,
        "user_supplied": ireq.user_supplied,
    }
    return InstallRequirement(link=link, **template_kwargs)
|
| 517 |
+
|
| 518 |
+
|
| 519 |
+
def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement:
    """
    Creates a new InstallationRequirement using the given template but without
    any extras. Sets the original requirement as the new one's parent
    (comes_from).
    """
    return InstallRequirement(
        # Strip extras from the underlying Requirement too, not just the
        # InstallRequirement's own extras list.
        req=(
            _set_requirement_extras(ireq.req, set()) if ireq.req is not None else None
        ),
        # The original requirement becomes the parent of the stripped copy.
        comes_from=ireq,
        editable=ireq.editable,
        link=ireq.link,
        markers=ireq.markers,
        use_pep517=ireq.use_pep517,
        isolated=ireq.isolated,
        global_options=ireq.global_options,
        hash_options=ireq.hash_options,
        constraint=ireq.constraint,
        extras=[],
        config_settings=ireq.config_settings,
        user_supplied=ireq.user_supplied,
        permit_editable_wheels=ireq.permit_editable_wheels,
    )
|
| 543 |
+
|
| 544 |
+
|
| 545 |
+
def install_req_extend_extras(
    ireq: InstallRequirement,
    extras: Collection[str],
) -> InstallRequirement:
    """
    Returns a copy of an installation requirement with some additional extras.
    Makes a shallow copy of the ireq object.
    """
    clone = copy.copy(ireq)
    merged_extras = set(ireq.extras) | set(extras)
    clone.extras = merged_extras
    # Keep the underlying packaging Requirement in sync with the new extras.
    if ireq.req is None:
        clone.req = None
    else:
        clone.req = _set_requirement_extras(ireq.req, merged_extras)
    return clone
|
vllm/lib/python3.10/site-packages/pip/_internal/req/req_file.py
ADDED
|
@@ -0,0 +1,623 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Requirements file parsing
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import codecs
|
| 6 |
+
import locale
|
| 7 |
+
import logging
|
| 8 |
+
import optparse
|
| 9 |
+
import os
|
| 10 |
+
import re
|
| 11 |
+
import shlex
|
| 12 |
+
import sys
|
| 13 |
+
import urllib.parse
|
| 14 |
+
from dataclasses import dataclass
|
| 15 |
+
from optparse import Values
|
| 16 |
+
from typing import (
|
| 17 |
+
TYPE_CHECKING,
|
| 18 |
+
Any,
|
| 19 |
+
Callable,
|
| 20 |
+
Dict,
|
| 21 |
+
Generator,
|
| 22 |
+
Iterable,
|
| 23 |
+
List,
|
| 24 |
+
NoReturn,
|
| 25 |
+
Optional,
|
| 26 |
+
Tuple,
|
| 27 |
+
)
|
| 28 |
+
|
| 29 |
+
from pip._internal.cli import cmdoptions
|
| 30 |
+
from pip._internal.exceptions import InstallationError, RequirementsFileParseError
|
| 31 |
+
from pip._internal.models.search_scope import SearchScope
|
| 32 |
+
|
| 33 |
+
if TYPE_CHECKING:
|
| 34 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 35 |
+
from pip._internal.network.session import PipSession
|
| 36 |
+
|
| 37 |
+
__all__ = ["parse_requirements"]
|
| 38 |
+
|
| 39 |
+
ReqFileLines = Iterable[Tuple[int, str]]
|
| 40 |
+
|
| 41 |
+
LineParser = Callable[[str], Tuple[str, Values]]
|
| 42 |
+
|
| 43 |
+
SCHEME_RE = re.compile(r"^(http|https|file):", re.I)
|
| 44 |
+
COMMENT_RE = re.compile(r"(^|\s+)#.*$")
|
| 45 |
+
|
| 46 |
+
# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
|
| 47 |
+
# variable name consisting of only uppercase letters, digits or the '_'
|
| 48 |
+
# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
|
| 49 |
+
# 2013 Edition.
|
| 50 |
+
ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")
|
| 51 |
+
|
| 52 |
+
SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
|
| 53 |
+
cmdoptions.index_url,
|
| 54 |
+
cmdoptions.extra_index_url,
|
| 55 |
+
cmdoptions.no_index,
|
| 56 |
+
cmdoptions.constraints,
|
| 57 |
+
cmdoptions.requirements,
|
| 58 |
+
cmdoptions.editable,
|
| 59 |
+
cmdoptions.find_links,
|
| 60 |
+
cmdoptions.no_binary,
|
| 61 |
+
cmdoptions.only_binary,
|
| 62 |
+
cmdoptions.prefer_binary,
|
| 63 |
+
cmdoptions.require_hashes,
|
| 64 |
+
cmdoptions.pre,
|
| 65 |
+
cmdoptions.trusted_host,
|
| 66 |
+
cmdoptions.use_new_feature,
|
| 67 |
+
]
|
| 68 |
+
|
| 69 |
+
# options to be passed to requirements
|
| 70 |
+
SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
|
| 71 |
+
cmdoptions.global_options,
|
| 72 |
+
cmdoptions.hash,
|
| 73 |
+
cmdoptions.config_settings,
|
| 74 |
+
]
|
| 75 |
+
|
| 76 |
+
SUPPORTED_OPTIONS_EDITABLE_REQ: List[Callable[..., optparse.Option]] = [
|
| 77 |
+
cmdoptions.config_settings,
|
| 78 |
+
]
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
# the 'dest' string values
|
| 82 |
+
SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
|
| 83 |
+
SUPPORTED_OPTIONS_EDITABLE_REQ_DEST = [
|
| 84 |
+
str(o().dest) for o in SUPPORTED_OPTIONS_EDITABLE_REQ
|
| 85 |
+
]
|
| 86 |
+
|
| 87 |
+
# order of BOMS is important: codecs.BOM_UTF16_LE is a prefix of codecs.BOM_UTF32_LE
|
| 88 |
+
# so data.startswith(BOM_UTF16_LE) would be true for UTF32_LE data
|
| 89 |
+
BOMS: List[Tuple[bytes, str]] = [
|
| 90 |
+
(codecs.BOM_UTF8, "utf-8"),
|
| 91 |
+
(codecs.BOM_UTF32, "utf-32"),
|
| 92 |
+
(codecs.BOM_UTF32_BE, "utf-32-be"),
|
| 93 |
+
(codecs.BOM_UTF32_LE, "utf-32-le"),
|
| 94 |
+
(codecs.BOM_UTF16, "utf-16"),
|
| 95 |
+
(codecs.BOM_UTF16_BE, "utf-16-be"),
|
| 96 |
+
(codecs.BOM_UTF16_LE, "utf-16-le"),
|
| 97 |
+
]
|
| 98 |
+
|
| 99 |
+
PEP263_ENCODING_RE = re.compile(rb"coding[:=]\s*([-\w.]+)")
|
| 100 |
+
DEFAULT_ENCODING = "utf-8"
|
| 101 |
+
|
| 102 |
+
logger = logging.getLogger(__name__)
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
@dataclass(frozen=True)
class ParsedRequirement:
    """One requirement produced from a requirements-file line."""

    # TODO: replace this with slots=True when dropping Python 3.9 support.
    __slots__ = (
        "requirement",
        "is_editable",
        "comes_from",
        "constraint",
        "options",
        "line_source",
    )

    # The requirement text itself (specifier, path, or URL).
    requirement: str
    # True when the line used -e/--editable.
    is_editable: bool
    # Human-readable origin, e.g. "-r requirements.txt (line 3)".
    comes_from: str
    # True when the line came from a constraints file (-c).
    constraint: bool
    # Per-requirement option values collected from the line, if any.
    options: Optional[Dict[str, Any]]
    # "line N of <file>" description for error messages, if available.
    line_source: Optional[str]
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
@dataclass(frozen=True)
class ParsedLine:
    """One logical (continuation-joined, comment-stripped) file line."""

    __slots__ = ("filename", "lineno", "args", "opts", "constraint")

    filename: str
    lineno: int
    # The non-option portion of the line (possibly empty).
    args: str
    # optparse Values holding the parsed option portion of the line.
    opts: Values
    constraint: bool

    @property
    def is_editable(self) -> bool:
        # Editable requirements arrive through the -e/--editable option.
        return bool(self.opts.editables)

    @property
    def requirement(self) -> Optional[str]:
        """Return the requirement string, or None for option-only lines."""
        if self.args:
            return self.args
        elif self.is_editable:
            # We don't support multiple -e on one line
            return self.opts.editables[0]
        return None
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
def parse_requirements(
    filename: str,
    session: "PipSession",
    finder: Optional["PackageFinder"] = None,
    options: Optional[optparse.Values] = None,
    constraint: bool = False,
) -> Generator[ParsedRequirement, None, None]:
    """Parse a requirements file and yield ParsedRequirement instances.

    :param filename: Path or url of requirements file.
    :param session: PipSession instance.
    :param finder: Instance of pip.index.PackageFinder.
    :param options: cli options.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    """
    line_parser = get_line_parser(finder)
    parser = RequirementsFileParser(session, line_parser)

    for parsed_line in parser.parse(filename, constraint):
        # Option-only lines mutate finder/session/options and yield None;
        # only actual requirement lines are surfaced to the caller.
        parsed_req = handle_line(
            parsed_line, options=options, finder=finder, session=session
        )
        if parsed_req is not None:
            yield parsed_req
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
def preprocess(content: str) -> ReqFileLines:
    """Turn raw file content into a (line_number, line) iterator, applying
    the standard filters: continuation-line joining, comment stripping, and
    environment-variable expansion.

    :param content: the content of the requirements file
    """
    numbered: ReqFileLines = enumerate(content.splitlines(), start=1)
    # The stages are applied in this exact order: joining must see raw
    # trailing backslashes, and comments must be gone before env expansion.
    for stage in (join_lines, ignore_comments, expand_env_variables):
        numbered = stage(numbered)
    return numbered
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
def handle_requirement_line(
    line: ParsedLine,
    options: Optional[optparse.Values] = None,
) -> ParsedRequirement:
    """Convert a requirement-bearing ParsedLine into a ParsedRequirement."""
    # preserve for the nested code path
    flag = "-c" if line.constraint else "-r"
    line_comes_from = f"{flag} {line.filename} (line {line.lineno})"

    assert line.requirement is not None

    # Only per-requirement options are carried over; which set applies
    # depends on whether this is an editable requirement.
    supported_dest = (
        SUPPORTED_OPTIONS_EDITABLE_REQ_DEST
        if line.is_editable
        else SUPPORTED_OPTIONS_REQ_DEST
    )
    opts_dict = line.opts.__dict__
    req_options = {
        dest: opts_dict[dest]
        for dest in supported_dest
        if dest in opts_dict and opts_dict[dest]
    }

    return ParsedRequirement(
        requirement=line.requirement,
        is_editable=line.is_editable,
        comes_from=line_comes_from,
        constraint=line.constraint,
        options=req_options,
        line_source=f"line {line.lineno} of {line.filename}",
    )
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
def handle_option_line(
    opts: Values,
    filename: str,
    lineno: int,
    finder: Optional["PackageFinder"] = None,
    options: Optional[optparse.Values] = None,
    session: Optional["PipSession"] = None,
) -> None:
    """Apply an option-only requirements line to the surrounding state.

    In order: warns on dangling --hash, percolates selected options up into
    the CLI ``options``, updates the ``finder``'s search scope and flags, and
    registers trusted hosts on the ``session``. Returns nothing.
    """
    if opts.hashes:
        logger.warning(
            "%s line %s has --hash but no requirement, and will be ignored.",
            filename,
            lineno,
        )

    if options:
        # percolate options upward
        if opts.require_hashes:
            options.require_hashes = opts.require_hashes
        if opts.features_enabled:
            options.features_enabled.extend(
                f for f in opts.features_enabled if f not in options.features_enabled
            )

    # set finder options
    if finder:
        find_links = finder.find_links
        index_urls = finder.index_urls
        no_index = finder.search_scope.no_index
        if opts.no_index is True:
            no_index = True
            index_urls = []
        if opts.index_url and not no_index:
            index_urls = [opts.index_url]
        if opts.extra_index_urls and not no_index:
            index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            find_links.append(value)

        if session:
            # We need to update the auth urls in session
            session.update_index_urls(index_urls)

        search_scope = SearchScope(
            find_links=find_links,
            index_urls=index_urls,
            no_index=no_index,
        )
        finder.search_scope = search_scope

        if opts.pre:
            finder.set_allow_all_prereleases()

        if opts.prefer_binary:
            finder.set_prefer_binary()

    if session:
        for host in opts.trusted_hosts or []:
            # Name the requirements file, not a placeholder, so users can
            # see where each --trusted-host came from.
            source = f"line {lineno} of {filename}"
            session.add_trusted_host(host, source=source)
|
| 290 |
+
|
| 291 |
+
|
| 292 |
+
def handle_line(
    line: ParsedLine,
    options: Optional[optparse.Values] = None,
    finder: Optional["PackageFinder"] = None,
    session: Optional["PipSession"] = None,
) -> Optional[ParsedRequirement]:
    """Handle a single parsed requirements line; This can result in
    creating/yielding requirements, or updating the finder.

    :param line: The parsed line to be processed.
    :param options: CLI options.
    :param finder: The finder - updated by non-requirement lines.
    :param session: The session - updated by non-requirement lines.

    Returns a ParsedRequirement object if the line is a requirement line,
    otherwise returns None.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.
    """
    # Option-only lines mutate the surrounding state and produce nothing.
    if line.requirement is None:
        handle_option_line(
            line.opts,
            line.filename,
            line.lineno,
            finder,
            options,
            session,
        )
        return None
    return handle_requirement_line(line, options)
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
class RequirementsFileParser:
    """Parses requirements files, following nested -r/-c references and
    detecting recursive includes."""

    def __init__(
        self,
        session: "PipSession",
        line_parser: LineParser,
    ) -> None:
        self._session = session
        self._line_parser = line_parser

    def parse(
        self, filename: str, constraint: bool
    ) -> Generator[ParsedLine, None, None]:
        """Parse a given file, yielding parsed lines."""
        yield from self._parse_and_recurse(
            filename, constraint, [{os.path.abspath(filename): None}]
        )

    def _parse_and_recurse(
        self,
        filename: str,
        constraint: bool,
        parsed_files_stack: List[Dict[str, Optional[str]]],
    ) -> Generator[ParsedLine, None, None]:
        """Parse *filename*, recursing into -r/-c references.

        ``parsed_files_stack[0]`` maps each already-seen absolute path to the
        file that first included it (None for the root), which is how
        recursive references are detected and reported.
        """
        for line in self._parse_file(filename, constraint):
            if line.requirement is None and (
                line.opts.requirements or line.opts.constraints
            ):
                # parse a nested requirements file
                if line.opts.requirements:
                    req_path = line.opts.requirements[0]
                    nested_constraint = False
                else:
                    req_path = line.opts.constraints[0]
                    nested_constraint = True

                # original file is over http
                if SCHEME_RE.search(filename):
                    # do a url join so relative paths work
                    req_path = urllib.parse.urljoin(filename, req_path)
                # original file and nested file are paths
                elif not SCHEME_RE.search(req_path):
                    # do a join so relative paths work
                    # and then abspath so that we can identify recursive references
                    req_path = os.path.abspath(
                        os.path.join(
                            os.path.dirname(filename),
                            req_path,
                        )
                    )
                parsed_files = parsed_files_stack[0]
                if req_path in parsed_files:
                    initial_file = parsed_files[req_path]
                    tail = (
                        f" and again in {initial_file}"
                        if initial_file is not None
                        else ""
                    )
                    # Name the file containing the offending reference so the
                    # user can locate and break the cycle.
                    raise RequirementsFileParseError(
                        f"{req_path} recursively references itself in {filename}{tail}"
                    )
                # Keeping a track where was each file first included in
                new_parsed_files = parsed_files.copy()
                new_parsed_files[req_path] = filename
                yield from self._parse_and_recurse(
                    req_path, nested_constraint, [new_parsed_files, *parsed_files_stack]
                )
            else:
                yield line

    def _parse_file(
        self, filename: str, constraint: bool
    ) -> Generator[ParsedLine, None, None]:
        """Fetch, decode, and line-parse a single requirements file."""
        _, content = get_file_content(filename, self._session)

        lines_enum = preprocess(content)

        for line_number, line in lines_enum:
            try:
                args_str, opts = self._line_parser(line)
            except OptionParsingError as e:
                # add offending line
                msg = f"Invalid requirement: {line}\n{e.msg}"
                raise RequirementsFileParseError(msg)

            yield ParsedLine(
                filename,
                line_number,
                args_str,
                opts,
                constraint,
            )
|
| 427 |
+
|
| 428 |
+
|
| 429 |
+
def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser:
    """Return a callable that parses one requirements-file line into an
    (args string, optparse Values) pair."""

    def parse_line(line: str) -> Tuple[str, Values]:
        # Build new parser for each line since it accumulates appendable
        # options.
        parser = build_parser()
        defaults = parser.get_default_values()
        defaults.index_url = None
        if finder:
            defaults.format_control = finder.format_control

        # Only the option portion goes through shlex/optparse; the args
        # portion may contain environment markers that shlex would mangle.
        args_str, options_str = break_args_options(line)

        try:
            options = shlex.split(options_str)
        except ValueError as e:
            raise OptionParsingError(f"Could not split options: {options_str}") from e

        opts, _ = parser.parse_args(options, defaults)

        return args_str, opts

    return parse_line
|
| 451 |
+
|
| 452 |
+
|
| 453 |
+
def break_args_options(line: str) -> Tuple[str, str]:
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.

    :param line: A single (already joined/stripped) requirements-file line.
    :return: ``(args, options)`` — the text before the first "-"/"--" token,
        and everything from that token onward, each space-joined.
    """
    tokens = line.split(" ")
    args = []
    options = tokens[:]
    for token in tokens:
        # startswith("-") already covers "--"; the first option-like token
        # ends the args portion.
        if token.startswith("-"):
            break
        args.append(token)
        options.pop(0)
    return " ".join(args), " ".join(options)
|
| 468 |
+
|
| 469 |
+
|
| 470 |
+
class OptionParsingError(Exception):
    """Raised when the options portion of a requirements line cannot be
    parsed; the message is kept on ``msg`` so callers can embed it in a
    richer RequirementsFileParseError."""

    def __init__(self, msg: str) -> None:
        self.msg = msg
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
def build_parser() -> optparse.OptionParser:
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    # Register every option that is legal on a requirements-file line,
    # both the file-level ones and the per-requirement ones.
    option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
    for option_factory in option_factories:
        option = option_factory()
        parser.add_option(option)

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self: Any, msg: str) -> "NoReturn":
        raise OptionParsingError(msg)

    # NOTE: mypy disallows assigning to a method
    # https://github.com/python/mypy/issues/2427
    parser.exit = parser_exit  # type: ignore

    return parser
|
| 496 |
+
|
| 497 |
+
|
| 498 |
+
def join_lines(lines_enum: ReqFileLines) -> ReqFileLines:
    """Joins a line ending in '\' with the previous line (except when following
    comments). The joined line takes on the index of the first line.
    """
    # Line number of the first physical line of the continuation currently
    # being accumulated.
    primary_line_number = None
    # Fragments of the continuation accumulated so far.
    new_line: List[str] = []
    for line_number, line in lines_enum:
        if not line.endswith("\\") or COMMENT_RE.match(line):
            if COMMENT_RE.match(line):
                # this ensures comments are always matched later
                line = " " + line
            if new_line:
                # This line terminates an open continuation: emit the join.
                new_line.append(line)
                assert primary_line_number is not None
                yield primary_line_number, "".join(new_line)
                new_line = []
            else:
                yield line_number, line
        else:
            # Line continues: strip the trailing backslash and accumulate.
            if not new_line:
                primary_line_number = line_number
            new_line.append(line.strip("\\"))

    # last line contains \
    if new_line:
        assert primary_line_number is not None
        yield primary_line_number, "".join(new_line)

    # TODO: handle space after '\'.
|
| 527 |
+
|
| 528 |
+
|
| 529 |
+
def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines:
    """
    Strips comments and filter empty lines.
    """
    for line_number, raw_line in lines_enum:
        stripped = COMMENT_RE.sub("", raw_line).strip()
        # Lines that were blank, or pure comment, vanish entirely.
        if stripped:
            yield line_number, stripped
|
| 538 |
+
|
| 539 |
+
|
| 540 |
+
def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines:
    """Replace all environment variables that can be retrieved via `os.getenv`.

    The only allowed format for environment variables defined in the
    requirement file is `${MY_VARIABLE_1}` to ensure two things:

    1. Strings that contain a `$` aren't accidentally (partially) expanded.
    2. Ensure consistency across platforms for requirement files.

    These points are the result of a discussion on the `github pull
    request #3514 <https://github.com/pypa/pip/pull/3514>`_.

    Valid characters in variable names follow the `POSIX standard
    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
    to uppercase letter, digits and the `_` (underscore).
    """
    for line_number, line in lines_enum:
        for placeholder, var_name in ENV_VAR_RE.findall(line):
            replacement = os.getenv(var_name)
            # Unset or empty variables leave the placeholder untouched.
            if replacement:
                line = line.replace(placeholder, replacement)

        yield line_number, line
|
| 565 |
+
|
| 566 |
+
|
| 567 |
+
def get_file_content(url: str, session: "PipSession") -> Tuple[str, str]:
|
| 568 |
+
"""Gets the content of a file; it may be a filename, file: URL, or
|
| 569 |
+
http: URL. Returns (location, content). Content is unicode.
|
| 570 |
+
Respects # -*- coding: declarations on the retrieved files.
|
| 571 |
+
|
| 572 |
+
:param url: File path or url.
|
| 573 |
+
:param session: PipSession instance.
|
| 574 |
+
"""
|
| 575 |
+
scheme = urllib.parse.urlsplit(url).scheme
|
| 576 |
+
# Pip has special support for file:// URLs (LocalFSAdapter).
|
| 577 |
+
if scheme in ["http", "https", "file"]:
|
| 578 |
+
# Delay importing heavy network modules until absolutely necessary.
|
| 579 |
+
from pip._internal.network.utils import raise_for_status
|
| 580 |
+
|
| 581 |
+
resp = session.get(url)
|
| 582 |
+
raise_for_status(resp)
|
| 583 |
+
return resp.url, resp.text
|
| 584 |
+
|
| 585 |
+
# Assume this is a bare path.
|
| 586 |
+
try:
|
| 587 |
+
with open(url, "rb") as f:
|
| 588 |
+
raw_content = f.read()
|
| 589 |
+
except OSError as exc:
|
| 590 |
+
raise InstallationError(f"Could not open requirements file: {exc}")
|
| 591 |
+
|
| 592 |
+
content = _decode_req_file(raw_content, url)
|
| 593 |
+
|
| 594 |
+
return url, content
|
| 595 |
+
|
| 596 |
+
|
| 597 |
+
def _decode_req_file(data: bytes, url: str) -> str:
|
| 598 |
+
for bom, encoding in BOMS:
|
| 599 |
+
if data.startswith(bom):
|
| 600 |
+
return data[len(bom) :].decode(encoding)
|
| 601 |
+
|
| 602 |
+
for line in data.split(b"\n")[:2]:
|
| 603 |
+
if line[0:1] == b"#":
|
| 604 |
+
result = PEP263_ENCODING_RE.search(line)
|
| 605 |
+
if result is not None:
|
| 606 |
+
encoding = result.groups()[0].decode("ascii")
|
| 607 |
+
return data.decode(encoding)
|
| 608 |
+
|
| 609 |
+
try:
|
| 610 |
+
return data.decode(DEFAULT_ENCODING)
|
| 611 |
+
except UnicodeDecodeError:
|
| 612 |
+
locale_encoding = locale.getpreferredencoding(False) or sys.getdefaultencoding()
|
| 613 |
+
logging.warning(
|
| 614 |
+
"unable to decode data from %s with default encoding %s, "
|
| 615 |
+
"falling back to encoding from locale: %s. "
|
| 616 |
+
"If this is intentional you should specify the encoding with a "
|
| 617 |
+
"PEP-263 style comment, e.g. '# -*- coding: %s -*-'",
|
| 618 |
+
url,
|
| 619 |
+
DEFAULT_ENCODING,
|
| 620 |
+
locale_encoding,
|
| 621 |
+
locale_encoding,
|
| 622 |
+
)
|
| 623 |
+
return data.decode(locale_encoding)
|
vllm/lib/python3.10/site-packages/pip/_internal/req/req_install.py
ADDED
|
@@ -0,0 +1,934 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
import shutil
|
| 5 |
+
import sys
|
| 6 |
+
import uuid
|
| 7 |
+
import zipfile
|
| 8 |
+
from optparse import Values
|
| 9 |
+
from pathlib import Path
|
| 10 |
+
from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
|
| 11 |
+
|
| 12 |
+
from pip._vendor.packaging.markers import Marker
|
| 13 |
+
from pip._vendor.packaging.requirements import Requirement
|
| 14 |
+
from pip._vendor.packaging.specifiers import SpecifierSet
|
| 15 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 16 |
+
from pip._vendor.packaging.version import Version
|
| 17 |
+
from pip._vendor.packaging.version import parse as parse_version
|
| 18 |
+
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
|
| 19 |
+
|
| 20 |
+
from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
|
| 21 |
+
from pip._internal.exceptions import InstallationError, PreviousBuildDirError
|
| 22 |
+
from pip._internal.locations import get_scheme
|
| 23 |
+
from pip._internal.metadata import (
|
| 24 |
+
BaseDistribution,
|
| 25 |
+
get_default_environment,
|
| 26 |
+
get_directory_distribution,
|
| 27 |
+
get_wheel_distribution,
|
| 28 |
+
)
|
| 29 |
+
from pip._internal.metadata.base import FilesystemWheel
|
| 30 |
+
from pip._internal.models.direct_url import DirectUrl
|
| 31 |
+
from pip._internal.models.link import Link
|
| 32 |
+
from pip._internal.operations.build.metadata import generate_metadata
|
| 33 |
+
from pip._internal.operations.build.metadata_editable import generate_editable_metadata
|
| 34 |
+
from pip._internal.operations.build.metadata_legacy import (
|
| 35 |
+
generate_metadata as generate_metadata_legacy,
|
| 36 |
+
)
|
| 37 |
+
from pip._internal.operations.install.editable_legacy import (
|
| 38 |
+
install_editable as install_editable_legacy,
|
| 39 |
+
)
|
| 40 |
+
from pip._internal.operations.install.wheel import install_wheel
|
| 41 |
+
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
|
| 42 |
+
from pip._internal.req.req_uninstall import UninstallPathSet
|
| 43 |
+
from pip._internal.utils.deprecation import deprecated
|
| 44 |
+
from pip._internal.utils.hashes import Hashes
|
| 45 |
+
from pip._internal.utils.misc import (
|
| 46 |
+
ConfiguredBuildBackendHookCaller,
|
| 47 |
+
ask_path_exists,
|
| 48 |
+
backup_dir,
|
| 49 |
+
display_path,
|
| 50 |
+
hide_url,
|
| 51 |
+
is_installable_dir,
|
| 52 |
+
redact_auth_from_requirement,
|
| 53 |
+
redact_auth_from_url,
|
| 54 |
+
)
|
| 55 |
+
from pip._internal.utils.packaging import get_requirement
|
| 56 |
+
from pip._internal.utils.subprocess import runner_with_spinner_message
|
| 57 |
+
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
| 58 |
+
from pip._internal.utils.unpacking import unpack_file
|
| 59 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
| 60 |
+
from pip._internal.vcs import vcs
|
| 61 |
+
|
| 62 |
+
logger = logging.getLogger(__name__)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class InstallRequirement:
|
| 66 |
+
"""
|
| 67 |
+
Represents something that may be installed later on, may have information
|
| 68 |
+
about where to fetch the relevant requirement and also contains logic for
|
| 69 |
+
installing the said requirement.
|
| 70 |
+
"""
|
| 71 |
+
|
| 72 |
+
def __init__(
|
| 73 |
+
self,
|
| 74 |
+
req: Optional[Requirement],
|
| 75 |
+
comes_from: Optional[Union[str, "InstallRequirement"]],
|
| 76 |
+
editable: bool = False,
|
| 77 |
+
link: Optional[Link] = None,
|
| 78 |
+
markers: Optional[Marker] = None,
|
| 79 |
+
use_pep517: Optional[bool] = None,
|
| 80 |
+
isolated: bool = False,
|
| 81 |
+
*,
|
| 82 |
+
global_options: Optional[List[str]] = None,
|
| 83 |
+
hash_options: Optional[Dict[str, List[str]]] = None,
|
| 84 |
+
config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
|
| 85 |
+
constraint: bool = False,
|
| 86 |
+
extras: Collection[str] = (),
|
| 87 |
+
user_supplied: bool = False,
|
| 88 |
+
permit_editable_wheels: bool = False,
|
| 89 |
+
) -> None:
|
| 90 |
+
assert req is None or isinstance(req, Requirement), req
|
| 91 |
+
self.req = req
|
| 92 |
+
self.comes_from = comes_from
|
| 93 |
+
self.constraint = constraint
|
| 94 |
+
self.editable = editable
|
| 95 |
+
self.permit_editable_wheels = permit_editable_wheels
|
| 96 |
+
|
| 97 |
+
# source_dir is the local directory where the linked requirement is
|
| 98 |
+
# located, or unpacked. In case unpacking is needed, creating and
|
| 99 |
+
# populating source_dir is done by the RequirementPreparer. Note this
|
| 100 |
+
# is not necessarily the directory where pyproject.toml or setup.py is
|
| 101 |
+
# located - that one is obtained via unpacked_source_directory.
|
| 102 |
+
self.source_dir: Optional[str] = None
|
| 103 |
+
if self.editable:
|
| 104 |
+
assert link
|
| 105 |
+
if link.is_file:
|
| 106 |
+
self.source_dir = os.path.normpath(os.path.abspath(link.file_path))
|
| 107 |
+
|
| 108 |
+
# original_link is the direct URL that was provided by the user for the
|
| 109 |
+
# requirement, either directly or via a constraints file.
|
| 110 |
+
if link is None and req and req.url:
|
| 111 |
+
# PEP 508 URL requirement
|
| 112 |
+
link = Link(req.url)
|
| 113 |
+
self.link = self.original_link = link
|
| 114 |
+
|
| 115 |
+
# When this InstallRequirement is a wheel obtained from the cache of locally
|
| 116 |
+
# built wheels, this is the source link corresponding to the cache entry, which
|
| 117 |
+
# was used to download and build the cached wheel.
|
| 118 |
+
self.cached_wheel_source_link: Optional[Link] = None
|
| 119 |
+
|
| 120 |
+
# Information about the location of the artifact that was downloaded . This
|
| 121 |
+
# property is guaranteed to be set in resolver results.
|
| 122 |
+
self.download_info: Optional[DirectUrl] = None
|
| 123 |
+
|
| 124 |
+
# Path to any downloaded or already-existing package.
|
| 125 |
+
self.local_file_path: Optional[str] = None
|
| 126 |
+
if self.link and self.link.is_file:
|
| 127 |
+
self.local_file_path = self.link.file_path
|
| 128 |
+
|
| 129 |
+
if extras:
|
| 130 |
+
self.extras = extras
|
| 131 |
+
elif req:
|
| 132 |
+
self.extras = req.extras
|
| 133 |
+
else:
|
| 134 |
+
self.extras = set()
|
| 135 |
+
if markers is None and req:
|
| 136 |
+
markers = req.marker
|
| 137 |
+
self.markers = markers
|
| 138 |
+
|
| 139 |
+
# This holds the Distribution object if this requirement is already installed.
|
| 140 |
+
self.satisfied_by: Optional[BaseDistribution] = None
|
| 141 |
+
# Whether the installation process should try to uninstall an existing
|
| 142 |
+
# distribution before installing this requirement.
|
| 143 |
+
self.should_reinstall = False
|
| 144 |
+
# Temporary build location
|
| 145 |
+
self._temp_build_dir: Optional[TempDirectory] = None
|
| 146 |
+
# Set to True after successful installation
|
| 147 |
+
self.install_succeeded: Optional[bool] = None
|
| 148 |
+
# Supplied options
|
| 149 |
+
self.global_options = global_options if global_options else []
|
| 150 |
+
self.hash_options = hash_options if hash_options else {}
|
| 151 |
+
self.config_settings = config_settings
|
| 152 |
+
# Set to True after successful preparation of this requirement
|
| 153 |
+
self.prepared = False
|
| 154 |
+
# User supplied requirement are explicitly requested for installation
|
| 155 |
+
# by the user via CLI arguments or requirements files, as opposed to,
|
| 156 |
+
# e.g. dependencies, extras or constraints.
|
| 157 |
+
self.user_supplied = user_supplied
|
| 158 |
+
|
| 159 |
+
self.isolated = isolated
|
| 160 |
+
self.build_env: BuildEnvironment = NoOpBuildEnvironment()
|
| 161 |
+
|
| 162 |
+
# For PEP 517, the directory where we request the project metadata
|
| 163 |
+
# gets stored. We need this to pass to build_wheel, so the backend
|
| 164 |
+
# can ensure that the wheel matches the metadata (see the PEP for
|
| 165 |
+
# details).
|
| 166 |
+
self.metadata_directory: Optional[str] = None
|
| 167 |
+
|
| 168 |
+
# The static build requirements (from pyproject.toml)
|
| 169 |
+
self.pyproject_requires: Optional[List[str]] = None
|
| 170 |
+
|
| 171 |
+
# Build requirements that we will check are available
|
| 172 |
+
self.requirements_to_check: List[str] = []
|
| 173 |
+
|
| 174 |
+
# The PEP 517 backend we should use to build the project
|
| 175 |
+
self.pep517_backend: Optional[BuildBackendHookCaller] = None
|
| 176 |
+
|
| 177 |
+
# Are we using PEP 517 for this requirement?
|
| 178 |
+
# After pyproject.toml has been loaded, the only valid values are True
|
| 179 |
+
# and False. Before loading, None is valid (meaning "use the default").
|
| 180 |
+
# Setting an explicit value before loading pyproject.toml is supported,
|
| 181 |
+
# but after loading this flag should be treated as read only.
|
| 182 |
+
self.use_pep517 = use_pep517
|
| 183 |
+
|
| 184 |
+
# If config settings are provided, enforce PEP 517.
|
| 185 |
+
if self.config_settings:
|
| 186 |
+
if self.use_pep517 is False:
|
| 187 |
+
logger.warning(
|
| 188 |
+
"--no-use-pep517 ignored for %s "
|
| 189 |
+
"because --config-settings are specified.",
|
| 190 |
+
self,
|
| 191 |
+
)
|
| 192 |
+
self.use_pep517 = True
|
| 193 |
+
|
| 194 |
+
# This requirement needs more preparation before it can be built
|
| 195 |
+
self.needs_more_preparation = False
|
| 196 |
+
|
| 197 |
+
# This requirement needs to be unpacked before it can be installed.
|
| 198 |
+
self._archive_source: Optional[Path] = None
|
| 199 |
+
|
| 200 |
+
def __str__(self) -> str:
|
| 201 |
+
if self.req:
|
| 202 |
+
s = redact_auth_from_requirement(self.req)
|
| 203 |
+
if self.link:
|
| 204 |
+
s += f" from {redact_auth_from_url(self.link.url)}"
|
| 205 |
+
elif self.link:
|
| 206 |
+
s = redact_auth_from_url(self.link.url)
|
| 207 |
+
else:
|
| 208 |
+
s = "<InstallRequirement>"
|
| 209 |
+
if self.satisfied_by is not None:
|
| 210 |
+
if self.satisfied_by.location is not None:
|
| 211 |
+
location = display_path(self.satisfied_by.location)
|
| 212 |
+
else:
|
| 213 |
+
location = "<memory>"
|
| 214 |
+
s += f" in {location}"
|
| 215 |
+
if self.comes_from:
|
| 216 |
+
if isinstance(self.comes_from, str):
|
| 217 |
+
comes_from: Optional[str] = self.comes_from
|
| 218 |
+
else:
|
| 219 |
+
comes_from = self.comes_from.from_path()
|
| 220 |
+
if comes_from:
|
| 221 |
+
s += f" (from {comes_from})"
|
| 222 |
+
return s
|
| 223 |
+
|
| 224 |
+
def __repr__(self) -> str:
|
| 225 |
+
return (
|
| 226 |
+
f"<{self.__class__.__name__} object: "
|
| 227 |
+
f"{str(self)} editable={self.editable!r}>"
|
| 228 |
+
)
|
| 229 |
+
|
| 230 |
+
def format_debug(self) -> str:
|
| 231 |
+
"""An un-tested helper for getting state, for debugging."""
|
| 232 |
+
attributes = vars(self)
|
| 233 |
+
names = sorted(attributes)
|
| 234 |
+
|
| 235 |
+
state = (f"{attr}={attributes[attr]!r}" for attr in sorted(names))
|
| 236 |
+
return "<{name} object: {{{state}}}>".format(
|
| 237 |
+
name=self.__class__.__name__,
|
| 238 |
+
state=", ".join(state),
|
| 239 |
+
)
|
| 240 |
+
|
| 241 |
+
# Things that are valid for all kinds of requirements?
|
| 242 |
+
@property
|
| 243 |
+
def name(self) -> Optional[str]:
|
| 244 |
+
if self.req is None:
|
| 245 |
+
return None
|
| 246 |
+
return self.req.name
|
| 247 |
+
|
| 248 |
+
@functools.cached_property
|
| 249 |
+
def supports_pyproject_editable(self) -> bool:
|
| 250 |
+
if not self.use_pep517:
|
| 251 |
+
return False
|
| 252 |
+
assert self.pep517_backend
|
| 253 |
+
with self.build_env:
|
| 254 |
+
runner = runner_with_spinner_message(
|
| 255 |
+
"Checking if build backend supports build_editable"
|
| 256 |
+
)
|
| 257 |
+
with self.pep517_backend.subprocess_runner(runner):
|
| 258 |
+
return "build_editable" in self.pep517_backend._supported_features()
|
| 259 |
+
|
| 260 |
+
@property
|
| 261 |
+
def specifier(self) -> SpecifierSet:
|
| 262 |
+
assert self.req is not None
|
| 263 |
+
return self.req.specifier
|
| 264 |
+
|
| 265 |
+
@property
|
| 266 |
+
def is_direct(self) -> bool:
|
| 267 |
+
"""Whether this requirement was specified as a direct URL."""
|
| 268 |
+
return self.original_link is not None
|
| 269 |
+
|
| 270 |
+
@property
|
| 271 |
+
def is_pinned(self) -> bool:
|
| 272 |
+
"""Return whether I am pinned to an exact version.
|
| 273 |
+
|
| 274 |
+
For example, some-package==1.2 is pinned; some-package>1.2 is not.
|
| 275 |
+
"""
|
| 276 |
+
assert self.req is not None
|
| 277 |
+
specifiers = self.req.specifier
|
| 278 |
+
return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}
|
| 279 |
+
|
| 280 |
+
def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
|
| 281 |
+
if not extras_requested:
|
| 282 |
+
# Provide an extra to safely evaluate the markers
|
| 283 |
+
# without matching any extra
|
| 284 |
+
extras_requested = ("",)
|
| 285 |
+
if self.markers is not None:
|
| 286 |
+
return any(
|
| 287 |
+
self.markers.evaluate({"extra": extra}) for extra in extras_requested
|
| 288 |
+
)
|
| 289 |
+
else:
|
| 290 |
+
return True
|
| 291 |
+
|
| 292 |
+
@property
|
| 293 |
+
def has_hash_options(self) -> bool:
|
| 294 |
+
"""Return whether any known-good hashes are specified as options.
|
| 295 |
+
|
| 296 |
+
These activate --require-hashes mode; hashes specified as part of a
|
| 297 |
+
URL do not.
|
| 298 |
+
|
| 299 |
+
"""
|
| 300 |
+
return bool(self.hash_options)
|
| 301 |
+
|
| 302 |
+
def hashes(self, trust_internet: bool = True) -> Hashes:
|
| 303 |
+
"""Return a hash-comparer that considers my option- and URL-based
|
| 304 |
+
hashes to be known-good.
|
| 305 |
+
|
| 306 |
+
Hashes in URLs--ones embedded in the requirements file, not ones
|
| 307 |
+
downloaded from an index server--are almost peers with ones from
|
| 308 |
+
flags. They satisfy --require-hashes (whether it was implicitly or
|
| 309 |
+
explicitly activated) but do not activate it. md5 and sha224 are not
|
| 310 |
+
allowed in flags, which should nudge people toward good algos. We
|
| 311 |
+
always OR all hashes together, even ones from URLs.
|
| 312 |
+
|
| 313 |
+
:param trust_internet: Whether to trust URL-based (#md5=...) hashes
|
| 314 |
+
downloaded from the internet, as by populate_link()
|
| 315 |
+
|
| 316 |
+
"""
|
| 317 |
+
good_hashes = self.hash_options.copy()
|
| 318 |
+
if trust_internet:
|
| 319 |
+
link = self.link
|
| 320 |
+
elif self.is_direct and self.user_supplied:
|
| 321 |
+
link = self.original_link
|
| 322 |
+
else:
|
| 323 |
+
link = None
|
| 324 |
+
if link and link.hash:
|
| 325 |
+
assert link.hash_name is not None
|
| 326 |
+
good_hashes.setdefault(link.hash_name, []).append(link.hash)
|
| 327 |
+
return Hashes(good_hashes)
|
| 328 |
+
|
| 329 |
+
def from_path(self) -> Optional[str]:
|
| 330 |
+
"""Format a nice indicator to show where this "comes from" """
|
| 331 |
+
if self.req is None:
|
| 332 |
+
return None
|
| 333 |
+
s = str(self.req)
|
| 334 |
+
if self.comes_from:
|
| 335 |
+
comes_from: Optional[str]
|
| 336 |
+
if isinstance(self.comes_from, str):
|
| 337 |
+
comes_from = self.comes_from
|
| 338 |
+
else:
|
| 339 |
+
comes_from = self.comes_from.from_path()
|
| 340 |
+
if comes_from:
|
| 341 |
+
s += "->" + comes_from
|
| 342 |
+
return s
|
| 343 |
+
|
| 344 |
+
def ensure_build_location(
|
| 345 |
+
self, build_dir: str, autodelete: bool, parallel_builds: bool
|
| 346 |
+
) -> str:
|
| 347 |
+
assert build_dir is not None
|
| 348 |
+
if self._temp_build_dir is not None:
|
| 349 |
+
assert self._temp_build_dir.path
|
| 350 |
+
return self._temp_build_dir.path
|
| 351 |
+
if self.req is None:
|
| 352 |
+
# Some systems have /tmp as a symlink which confuses custom
|
| 353 |
+
# builds (such as numpy). Thus, we ensure that the real path
|
| 354 |
+
# is returned.
|
| 355 |
+
self._temp_build_dir = TempDirectory(
|
| 356 |
+
kind=tempdir_kinds.REQ_BUILD, globally_managed=True
|
| 357 |
+
)
|
| 358 |
+
|
| 359 |
+
return self._temp_build_dir.path
|
| 360 |
+
|
| 361 |
+
# This is the only remaining place where we manually determine the path
|
| 362 |
+
# for the temporary directory. It is only needed for editables where
|
| 363 |
+
# it is the value of the --src option.
|
| 364 |
+
|
| 365 |
+
# When parallel builds are enabled, add a UUID to the build directory
|
| 366 |
+
# name so multiple builds do not interfere with each other.
|
| 367 |
+
dir_name: str = canonicalize_name(self.req.name)
|
| 368 |
+
if parallel_builds:
|
| 369 |
+
dir_name = f"{dir_name}_{uuid.uuid4().hex}"
|
| 370 |
+
|
| 371 |
+
# FIXME: Is there a better place to create the build_dir? (hg and bzr
|
| 372 |
+
# need this)
|
| 373 |
+
if not os.path.exists(build_dir):
|
| 374 |
+
logger.debug("Creating directory %s", build_dir)
|
| 375 |
+
os.makedirs(build_dir)
|
| 376 |
+
actual_build_dir = os.path.join(build_dir, dir_name)
|
| 377 |
+
# `None` indicates that we respect the globally-configured deletion
|
| 378 |
+
# settings, which is what we actually want when auto-deleting.
|
| 379 |
+
delete_arg = None if autodelete else False
|
| 380 |
+
return TempDirectory(
|
| 381 |
+
path=actual_build_dir,
|
| 382 |
+
delete=delete_arg,
|
| 383 |
+
kind=tempdir_kinds.REQ_BUILD,
|
| 384 |
+
globally_managed=True,
|
| 385 |
+
).path
|
| 386 |
+
|
| 387 |
+
def _set_requirement(self) -> None:
|
| 388 |
+
"""Set requirement after generating metadata."""
|
| 389 |
+
assert self.req is None
|
| 390 |
+
assert self.metadata is not None
|
| 391 |
+
assert self.source_dir is not None
|
| 392 |
+
|
| 393 |
+
# Construct a Requirement object from the generated metadata
|
| 394 |
+
if isinstance(parse_version(self.metadata["Version"]), Version):
|
| 395 |
+
op = "=="
|
| 396 |
+
else:
|
| 397 |
+
op = "==="
|
| 398 |
+
|
| 399 |
+
self.req = get_requirement(
|
| 400 |
+
"".join(
|
| 401 |
+
[
|
| 402 |
+
self.metadata["Name"],
|
| 403 |
+
op,
|
| 404 |
+
self.metadata["Version"],
|
| 405 |
+
]
|
| 406 |
+
)
|
| 407 |
+
)
|
| 408 |
+
|
| 409 |
+
def warn_on_mismatching_name(self) -> None:
|
| 410 |
+
assert self.req is not None
|
| 411 |
+
metadata_name = canonicalize_name(self.metadata["Name"])
|
| 412 |
+
if canonicalize_name(self.req.name) == metadata_name:
|
| 413 |
+
# Everything is fine.
|
| 414 |
+
return
|
| 415 |
+
|
| 416 |
+
# If we're here, there's a mismatch. Log a warning about it.
|
| 417 |
+
logger.warning(
|
| 418 |
+
"Generating metadata for package %s "
|
| 419 |
+
"produced metadata for project name %s. Fix your "
|
| 420 |
+
"#egg=%s fragments.",
|
| 421 |
+
self.name,
|
| 422 |
+
metadata_name,
|
| 423 |
+
self.name,
|
| 424 |
+
)
|
| 425 |
+
self.req = get_requirement(metadata_name)
|
| 426 |
+
|
| 427 |
+
def check_if_exists(self, use_user_site: bool) -> None:
|
| 428 |
+
"""Find an installed distribution that satisfies or conflicts
|
| 429 |
+
with this requirement, and set self.satisfied_by or
|
| 430 |
+
self.should_reinstall appropriately.
|
| 431 |
+
"""
|
| 432 |
+
if self.req is None:
|
| 433 |
+
return
|
| 434 |
+
existing_dist = get_default_environment().get_distribution(self.req.name)
|
| 435 |
+
if not existing_dist:
|
| 436 |
+
return
|
| 437 |
+
|
| 438 |
+
version_compatible = self.req.specifier.contains(
|
| 439 |
+
existing_dist.version,
|
| 440 |
+
prereleases=True,
|
| 441 |
+
)
|
| 442 |
+
if not version_compatible:
|
| 443 |
+
self.satisfied_by = None
|
| 444 |
+
if use_user_site:
|
| 445 |
+
if existing_dist.in_usersite:
|
| 446 |
+
self.should_reinstall = True
|
| 447 |
+
elif running_under_virtualenv() and existing_dist.in_site_packages:
|
| 448 |
+
raise InstallationError(
|
| 449 |
+
f"Will not install to the user site because it will "
|
| 450 |
+
f"lack sys.path precedence to {existing_dist.raw_name} "
|
| 451 |
+
f"in {existing_dist.location}"
|
| 452 |
+
)
|
| 453 |
+
else:
|
| 454 |
+
self.should_reinstall = True
|
| 455 |
+
else:
|
| 456 |
+
if self.editable:
|
| 457 |
+
self.should_reinstall = True
|
| 458 |
+
# when installing editables, nothing pre-existing should ever
|
| 459 |
+
# satisfy
|
| 460 |
+
self.satisfied_by = None
|
| 461 |
+
else:
|
| 462 |
+
self.satisfied_by = existing_dist
|
| 463 |
+
|
| 464 |
+
# Things valid for wheels
|
| 465 |
+
@property
|
| 466 |
+
def is_wheel(self) -> bool:
|
| 467 |
+
if not self.link:
|
| 468 |
+
return False
|
| 469 |
+
return self.link.is_wheel
|
| 470 |
+
|
| 471 |
+
@property
|
| 472 |
+
def is_wheel_from_cache(self) -> bool:
|
| 473 |
+
# When True, it means that this InstallRequirement is a local wheel file in the
|
| 474 |
+
# cache of locally built wheels.
|
| 475 |
+
return self.cached_wheel_source_link is not None
|
| 476 |
+
|
| 477 |
+
# Things valid for sdists
|
| 478 |
+
@property
|
| 479 |
+
def unpacked_source_directory(self) -> str:
|
| 480 |
+
assert self.source_dir, f"No source dir for {self}"
|
| 481 |
+
return os.path.join(
|
| 482 |
+
self.source_dir, self.link and self.link.subdirectory_fragment or ""
|
| 483 |
+
)
|
| 484 |
+
|
| 485 |
+
@property
|
| 486 |
+
def setup_py_path(self) -> str:
|
| 487 |
+
assert self.source_dir, f"No source dir for {self}"
|
| 488 |
+
setup_py = os.path.join(self.unpacked_source_directory, "setup.py")
|
| 489 |
+
|
| 490 |
+
return setup_py
|
| 491 |
+
|
| 492 |
+
@property
|
| 493 |
+
def setup_cfg_path(self) -> str:
|
| 494 |
+
assert self.source_dir, f"No source dir for {self}"
|
| 495 |
+
setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")
|
| 496 |
+
|
| 497 |
+
return setup_cfg
|
| 498 |
+
|
| 499 |
+
@property
|
| 500 |
+
def pyproject_toml_path(self) -> str:
|
| 501 |
+
assert self.source_dir, f"No source dir for {self}"
|
| 502 |
+
return make_pyproject_path(self.unpacked_source_directory)
|
| 503 |
+
|
| 504 |
+
def load_pyproject_toml(self) -> None:
|
| 505 |
+
"""Load the pyproject.toml file.
|
| 506 |
+
|
| 507 |
+
After calling this routine, all of the attributes related to PEP 517
|
| 508 |
+
processing for this requirement have been set. In particular, the
|
| 509 |
+
use_pep517 attribute can be used to determine whether we should
|
| 510 |
+
follow the PEP 517 or legacy (setup.py) code path.
|
| 511 |
+
"""
|
| 512 |
+
pyproject_toml_data = load_pyproject_toml(
|
| 513 |
+
self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
|
| 514 |
+
)
|
| 515 |
+
|
| 516 |
+
if pyproject_toml_data is None:
|
| 517 |
+
assert not self.config_settings
|
| 518 |
+
self.use_pep517 = False
|
| 519 |
+
return
|
| 520 |
+
|
| 521 |
+
self.use_pep517 = True
|
| 522 |
+
requires, backend, check, backend_path = pyproject_toml_data
|
| 523 |
+
self.requirements_to_check = check
|
| 524 |
+
self.pyproject_requires = requires
|
| 525 |
+
self.pep517_backend = ConfiguredBuildBackendHookCaller(
|
| 526 |
+
self,
|
| 527 |
+
self.unpacked_source_directory,
|
| 528 |
+
backend,
|
| 529 |
+
backend_path=backend_path,
|
| 530 |
+
)
|
| 531 |
+
|
| 532 |
+
def isolated_editable_sanity_check(self) -> None:
|
| 533 |
+
"""Check that an editable requirement if valid for use with PEP 517/518.
|
| 534 |
+
|
| 535 |
+
This verifies that an editable that has a pyproject.toml either supports PEP 660
|
| 536 |
+
or as a setup.py or a setup.cfg
|
| 537 |
+
"""
|
| 538 |
+
if (
|
| 539 |
+
self.editable
|
| 540 |
+
and self.use_pep517
|
| 541 |
+
and not self.supports_pyproject_editable
|
| 542 |
+
and not os.path.isfile(self.setup_py_path)
|
| 543 |
+
and not os.path.isfile(self.setup_cfg_path)
|
| 544 |
+
):
|
| 545 |
+
raise InstallationError(
|
| 546 |
+
f"Project {self} has a 'pyproject.toml' and its build "
|
| 547 |
+
f"backend is missing the 'build_editable' hook. Since it does not "
|
| 548 |
+
f"have a 'setup.py' nor a 'setup.cfg', "
|
| 549 |
+
f"it cannot be installed in editable mode. "
|
| 550 |
+
f"Consider using a build backend that supports PEP 660."
|
| 551 |
+
)
|
| 552 |
+
|
| 553 |
+
def prepare_metadata(self) -> None:
|
| 554 |
+
"""Ensure that project metadata is available.
|
| 555 |
+
|
| 556 |
+
Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
|
| 557 |
+
Under legacy processing, call setup.py egg-info.
|
| 558 |
+
"""
|
| 559 |
+
assert self.source_dir, f"No source dir for {self}"
|
| 560 |
+
details = self.name or f"from {self.link}"
|
| 561 |
+
|
| 562 |
+
if self.use_pep517:
|
| 563 |
+
assert self.pep517_backend is not None
|
| 564 |
+
if (
|
| 565 |
+
self.editable
|
| 566 |
+
and self.permit_editable_wheels
|
| 567 |
+
and self.supports_pyproject_editable
|
| 568 |
+
):
|
| 569 |
+
self.metadata_directory = generate_editable_metadata(
|
| 570 |
+
build_env=self.build_env,
|
| 571 |
+
backend=self.pep517_backend,
|
| 572 |
+
details=details,
|
| 573 |
+
)
|
| 574 |
+
else:
|
| 575 |
+
self.metadata_directory = generate_metadata(
|
| 576 |
+
build_env=self.build_env,
|
| 577 |
+
backend=self.pep517_backend,
|
| 578 |
+
details=details,
|
| 579 |
+
)
|
| 580 |
+
else:
|
| 581 |
+
self.metadata_directory = generate_metadata_legacy(
|
| 582 |
+
build_env=self.build_env,
|
| 583 |
+
setup_py_path=self.setup_py_path,
|
| 584 |
+
source_dir=self.unpacked_source_directory,
|
| 585 |
+
isolated=self.isolated,
|
| 586 |
+
details=details,
|
| 587 |
+
)
|
| 588 |
+
|
| 589 |
+
# Act on the newly generated metadata, based on the name and version.
|
| 590 |
+
if not self.name:
|
| 591 |
+
self._set_requirement()
|
| 592 |
+
else:
|
| 593 |
+
self.warn_on_mismatching_name()
|
| 594 |
+
|
| 595 |
+
self.assert_source_matches_version()
|
| 596 |
+
|
| 597 |
+
@property
|
| 598 |
+
def metadata(self) -> Any:
|
| 599 |
+
if not hasattr(self, "_metadata"):
|
| 600 |
+
self._metadata = self.get_dist().metadata
|
| 601 |
+
|
| 602 |
+
return self._metadata
|
| 603 |
+
|
| 604 |
+
def get_dist(self) -> BaseDistribution:
|
| 605 |
+
if self.metadata_directory:
|
| 606 |
+
return get_directory_distribution(self.metadata_directory)
|
| 607 |
+
elif self.local_file_path and self.is_wheel:
|
| 608 |
+
assert self.req is not None
|
| 609 |
+
return get_wheel_distribution(
|
| 610 |
+
FilesystemWheel(self.local_file_path),
|
| 611 |
+
canonicalize_name(self.req.name),
|
| 612 |
+
)
|
| 613 |
+
raise AssertionError(
|
| 614 |
+
f"InstallRequirement {self} has no metadata directory and no wheel: "
|
| 615 |
+
f"can't make a distribution."
|
| 616 |
+
)
|
| 617 |
+
|
| 618 |
+
def assert_source_matches_version(self) -> None:
|
| 619 |
+
assert self.source_dir, f"No source dir for {self}"
|
| 620 |
+
version = self.metadata["version"]
|
| 621 |
+
if self.req and self.req.specifier and version not in self.req.specifier:
|
| 622 |
+
logger.warning(
|
| 623 |
+
"Requested %s, but installing version %s",
|
| 624 |
+
self,
|
| 625 |
+
version,
|
| 626 |
+
)
|
| 627 |
+
else:
|
| 628 |
+
logger.debug(
|
| 629 |
+
"Source in %s has version %s, which satisfies requirement %s",
|
| 630 |
+
display_path(self.source_dir),
|
| 631 |
+
version,
|
| 632 |
+
self,
|
| 633 |
+
)
|
| 634 |
+
|
| 635 |
+
# For both source distributions and editables
|
| 636 |
+
def ensure_has_source_dir(
|
| 637 |
+
self,
|
| 638 |
+
parent_dir: str,
|
| 639 |
+
autodelete: bool = False,
|
| 640 |
+
parallel_builds: bool = False,
|
| 641 |
+
) -> None:
|
| 642 |
+
"""Ensure that a source_dir is set.
|
| 643 |
+
|
| 644 |
+
This will create a temporary build dir if the name of the requirement
|
| 645 |
+
isn't known yet.
|
| 646 |
+
|
| 647 |
+
:param parent_dir: The ideal pip parent_dir for the source_dir.
|
| 648 |
+
Generally src_dir for editables and build_dir for sdists.
|
| 649 |
+
:return: self.source_dir
|
| 650 |
+
"""
|
| 651 |
+
if self.source_dir is None:
|
| 652 |
+
self.source_dir = self.ensure_build_location(
|
| 653 |
+
parent_dir,
|
| 654 |
+
autodelete=autodelete,
|
| 655 |
+
parallel_builds=parallel_builds,
|
| 656 |
+
)
|
| 657 |
+
|
| 658 |
+
def needs_unpacked_archive(self, archive_source: Path) -> None:
|
| 659 |
+
assert self._archive_source is None
|
| 660 |
+
self._archive_source = archive_source
|
| 661 |
+
|
| 662 |
+
def ensure_pristine_source_checkout(self) -> None:
|
| 663 |
+
"""Ensure the source directory has not yet been built in."""
|
| 664 |
+
assert self.source_dir is not None
|
| 665 |
+
if self._archive_source is not None:
|
| 666 |
+
unpack_file(str(self._archive_source), self.source_dir)
|
| 667 |
+
elif is_installable_dir(self.source_dir):
|
| 668 |
+
# If a checkout exists, it's unwise to keep going.
|
| 669 |
+
# version inconsistencies are logged later, but do not fail
|
| 670 |
+
# the installation.
|
| 671 |
+
raise PreviousBuildDirError(
|
| 672 |
+
f"pip can't proceed with requirements '{self}' due to a "
|
| 673 |
+
f"pre-existing build directory ({self.source_dir}). This is likely "
|
| 674 |
+
"due to a previous installation that failed . pip is "
|
| 675 |
+
"being responsible and not assuming it can delete this. "
|
| 676 |
+
"Please delete it and try again."
|
| 677 |
+
)
|
| 678 |
+
|
| 679 |
+
# For editable installations
|
| 680 |
+
def update_editable(self) -> None:
|
| 681 |
+
if not self.link:
|
| 682 |
+
logger.debug(
|
| 683 |
+
"Cannot update repository at %s; repository location is unknown",
|
| 684 |
+
self.source_dir,
|
| 685 |
+
)
|
| 686 |
+
return
|
| 687 |
+
assert self.editable
|
| 688 |
+
assert self.source_dir
|
| 689 |
+
if self.link.scheme == "file":
|
| 690 |
+
# Static paths don't get updated
|
| 691 |
+
return
|
| 692 |
+
vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
|
| 693 |
+
# Editable requirements are validated in Requirement constructors.
|
| 694 |
+
# So here, if it's neither a path nor a valid VCS URL, it's a bug.
|
| 695 |
+
assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
|
| 696 |
+
hidden_url = hide_url(self.link.url)
|
| 697 |
+
vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)
|
| 698 |
+
|
| 699 |
+
# Top-level Actions
|
| 700 |
+
def uninstall(
|
| 701 |
+
self, auto_confirm: bool = False, verbose: bool = False
|
| 702 |
+
) -> Optional[UninstallPathSet]:
|
| 703 |
+
"""
|
| 704 |
+
Uninstall the distribution currently satisfying this requirement.
|
| 705 |
+
|
| 706 |
+
Prompts before removing or modifying files unless
|
| 707 |
+
``auto_confirm`` is True.
|
| 708 |
+
|
| 709 |
+
Refuses to delete or modify files outside of ``sys.prefix`` -
|
| 710 |
+
thus uninstallation within a virtual environment can only
|
| 711 |
+
modify that virtual environment, even if the virtualenv is
|
| 712 |
+
linked to global site-packages.
|
| 713 |
+
|
| 714 |
+
"""
|
| 715 |
+
assert self.req
|
| 716 |
+
dist = get_default_environment().get_distribution(self.req.name)
|
| 717 |
+
if not dist:
|
| 718 |
+
logger.warning("Skipping %s as it is not installed.", self.name)
|
| 719 |
+
return None
|
| 720 |
+
logger.info("Found existing installation: %s", dist)
|
| 721 |
+
|
| 722 |
+
uninstalled_pathset = UninstallPathSet.from_dist(dist)
|
| 723 |
+
uninstalled_pathset.remove(auto_confirm, verbose)
|
| 724 |
+
return uninstalled_pathset
|
| 725 |
+
|
| 726 |
+
def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
|
| 727 |
+
def _clean_zip_name(name: str, prefix: str) -> str:
|
| 728 |
+
assert name.startswith(
|
| 729 |
+
prefix + os.path.sep
|
| 730 |
+
), f"name {name!r} doesn't start with prefix {prefix!r}"
|
| 731 |
+
name = name[len(prefix) + 1 :]
|
| 732 |
+
name = name.replace(os.path.sep, "/")
|
| 733 |
+
return name
|
| 734 |
+
|
| 735 |
+
assert self.req is not None
|
| 736 |
+
path = os.path.join(parentdir, path)
|
| 737 |
+
name = _clean_zip_name(path, rootdir)
|
| 738 |
+
return self.req.name + "/" + name
|
| 739 |
+
|
| 740 |
+
def archive(self, build_dir: Optional[str]) -> None:
|
| 741 |
+
"""Saves archive to provided build_dir.
|
| 742 |
+
|
| 743 |
+
Used for saving downloaded VCS requirements as part of `pip download`.
|
| 744 |
+
"""
|
| 745 |
+
assert self.source_dir
|
| 746 |
+
if build_dir is None:
|
| 747 |
+
return
|
| 748 |
+
|
| 749 |
+
create_archive = True
|
| 750 |
+
archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
|
| 751 |
+
archive_path = os.path.join(build_dir, archive_name)
|
| 752 |
+
|
| 753 |
+
if os.path.exists(archive_path):
|
| 754 |
+
response = ask_path_exists(
|
| 755 |
+
f"The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, "
|
| 756 |
+
"(b)ackup, (a)bort ",
|
| 757 |
+
("i", "w", "b", "a"),
|
| 758 |
+
)
|
| 759 |
+
if response == "i":
|
| 760 |
+
create_archive = False
|
| 761 |
+
elif response == "w":
|
| 762 |
+
logger.warning("Deleting %s", display_path(archive_path))
|
| 763 |
+
os.remove(archive_path)
|
| 764 |
+
elif response == "b":
|
| 765 |
+
dest_file = backup_dir(archive_path)
|
| 766 |
+
logger.warning(
|
| 767 |
+
"Backing up %s to %s",
|
| 768 |
+
display_path(archive_path),
|
| 769 |
+
display_path(dest_file),
|
| 770 |
+
)
|
| 771 |
+
shutil.move(archive_path, dest_file)
|
| 772 |
+
elif response == "a":
|
| 773 |
+
sys.exit(-1)
|
| 774 |
+
|
| 775 |
+
if not create_archive:
|
| 776 |
+
return
|
| 777 |
+
|
| 778 |
+
zip_output = zipfile.ZipFile(
|
| 779 |
+
archive_path,
|
| 780 |
+
"w",
|
| 781 |
+
zipfile.ZIP_DEFLATED,
|
| 782 |
+
allowZip64=True,
|
| 783 |
+
)
|
| 784 |
+
with zip_output:
|
| 785 |
+
dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
|
| 786 |
+
for dirpath, dirnames, filenames in os.walk(dir):
|
| 787 |
+
for dirname in dirnames:
|
| 788 |
+
dir_arcname = self._get_archive_name(
|
| 789 |
+
dirname,
|
| 790 |
+
parentdir=dirpath,
|
| 791 |
+
rootdir=dir,
|
| 792 |
+
)
|
| 793 |
+
zipdir = zipfile.ZipInfo(dir_arcname + "/")
|
| 794 |
+
zipdir.external_attr = 0x1ED << 16 # 0o755
|
| 795 |
+
zip_output.writestr(zipdir, "")
|
| 796 |
+
for filename in filenames:
|
| 797 |
+
file_arcname = self._get_archive_name(
|
| 798 |
+
filename,
|
| 799 |
+
parentdir=dirpath,
|
| 800 |
+
rootdir=dir,
|
| 801 |
+
)
|
| 802 |
+
filename = os.path.join(dirpath, filename)
|
| 803 |
+
zip_output.write(filename, file_arcname)
|
| 804 |
+
|
| 805 |
+
logger.info("Saved %s", display_path(archive_path))
|
| 806 |
+
|
| 807 |
+
def install(
|
| 808 |
+
self,
|
| 809 |
+
global_options: Optional[Sequence[str]] = None,
|
| 810 |
+
root: Optional[str] = None,
|
| 811 |
+
home: Optional[str] = None,
|
| 812 |
+
prefix: Optional[str] = None,
|
| 813 |
+
warn_script_location: bool = True,
|
| 814 |
+
use_user_site: bool = False,
|
| 815 |
+
pycompile: bool = True,
|
| 816 |
+
) -> None:
|
| 817 |
+
assert self.req is not None
|
| 818 |
+
scheme = get_scheme(
|
| 819 |
+
self.req.name,
|
| 820 |
+
user=use_user_site,
|
| 821 |
+
home=home,
|
| 822 |
+
root=root,
|
| 823 |
+
isolated=self.isolated,
|
| 824 |
+
prefix=prefix,
|
| 825 |
+
)
|
| 826 |
+
|
| 827 |
+
if self.editable and not self.is_wheel:
|
| 828 |
+
deprecated(
|
| 829 |
+
reason=(
|
| 830 |
+
f"Legacy editable install of {self} (setup.py develop) "
|
| 831 |
+
"is deprecated."
|
| 832 |
+
),
|
| 833 |
+
replacement=(
|
| 834 |
+
"to add a pyproject.toml or enable --use-pep517, "
|
| 835 |
+
"and use setuptools >= 64. "
|
| 836 |
+
"If the resulting installation is not behaving as expected, "
|
| 837 |
+
"try using --config-settings editable_mode=compat. "
|
| 838 |
+
"Please consult the setuptools documentation for more information"
|
| 839 |
+
),
|
| 840 |
+
gone_in="25.1",
|
| 841 |
+
issue=11457,
|
| 842 |
+
)
|
| 843 |
+
if self.config_settings:
|
| 844 |
+
logger.warning(
|
| 845 |
+
"--config-settings ignored for legacy editable install of %s. "
|
| 846 |
+
"Consider upgrading to a version of setuptools "
|
| 847 |
+
"that supports PEP 660 (>= 64).",
|
| 848 |
+
self,
|
| 849 |
+
)
|
| 850 |
+
install_editable_legacy(
|
| 851 |
+
global_options=global_options if global_options is not None else [],
|
| 852 |
+
prefix=prefix,
|
| 853 |
+
home=home,
|
| 854 |
+
use_user_site=use_user_site,
|
| 855 |
+
name=self.req.name,
|
| 856 |
+
setup_py_path=self.setup_py_path,
|
| 857 |
+
isolated=self.isolated,
|
| 858 |
+
build_env=self.build_env,
|
| 859 |
+
unpacked_source_directory=self.unpacked_source_directory,
|
| 860 |
+
)
|
| 861 |
+
self.install_succeeded = True
|
| 862 |
+
return
|
| 863 |
+
|
| 864 |
+
assert self.is_wheel
|
| 865 |
+
assert self.local_file_path
|
| 866 |
+
|
| 867 |
+
install_wheel(
|
| 868 |
+
self.req.name,
|
| 869 |
+
self.local_file_path,
|
| 870 |
+
scheme=scheme,
|
| 871 |
+
req_description=str(self.req),
|
| 872 |
+
pycompile=pycompile,
|
| 873 |
+
warn_script_location=warn_script_location,
|
| 874 |
+
direct_url=self.download_info if self.is_direct else None,
|
| 875 |
+
requested=self.user_supplied,
|
| 876 |
+
)
|
| 877 |
+
self.install_succeeded = True
|
| 878 |
+
|
| 879 |
+
|
| 880 |
+
def check_invalid_constraint_type(req: InstallRequirement) -> str:
|
| 881 |
+
# Check for unsupported forms
|
| 882 |
+
problem = ""
|
| 883 |
+
if not req.name:
|
| 884 |
+
problem = "Unnamed requirements are not allowed as constraints"
|
| 885 |
+
elif req.editable:
|
| 886 |
+
problem = "Editable requirements are not allowed as constraints"
|
| 887 |
+
elif req.extras:
|
| 888 |
+
problem = "Constraints cannot have extras"
|
| 889 |
+
|
| 890 |
+
if problem:
|
| 891 |
+
deprecated(
|
| 892 |
+
reason=(
|
| 893 |
+
"Constraints are only allowed to take the form of a package "
|
| 894 |
+
"name and a version specifier. Other forms were originally "
|
| 895 |
+
"permitted as an accident of the implementation, but were "
|
| 896 |
+
"undocumented. The new implementation of the resolver no "
|
| 897 |
+
"longer supports these forms."
|
| 898 |
+
),
|
| 899 |
+
replacement="replacing the constraint with a requirement",
|
| 900 |
+
# No plan yet for when the new resolver becomes default
|
| 901 |
+
gone_in=None,
|
| 902 |
+
issue=8210,
|
| 903 |
+
)
|
| 904 |
+
|
| 905 |
+
return problem
|
| 906 |
+
|
| 907 |
+
|
| 908 |
+
def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> bool:
|
| 909 |
+
if getattr(options, option, None):
|
| 910 |
+
return True
|
| 911 |
+
for req in reqs:
|
| 912 |
+
if getattr(req, option, None):
|
| 913 |
+
return True
|
| 914 |
+
return False
|
| 915 |
+
|
| 916 |
+
|
| 917 |
+
def check_legacy_setup_py_options(
|
| 918 |
+
options: Values,
|
| 919 |
+
reqs: List[InstallRequirement],
|
| 920 |
+
) -> None:
|
| 921 |
+
has_build_options = _has_option(options, reqs, "build_options")
|
| 922 |
+
has_global_options = _has_option(options, reqs, "global_options")
|
| 923 |
+
if has_build_options or has_global_options:
|
| 924 |
+
deprecated(
|
| 925 |
+
reason="--build-option and --global-option are deprecated.",
|
| 926 |
+
issue=11859,
|
| 927 |
+
replacement="to use --config-settings",
|
| 928 |
+
gone_in=None,
|
| 929 |
+
)
|
| 930 |
+
logger.warning(
|
| 931 |
+
"Implying --no-binary=:all: due to the presence of "
|
| 932 |
+
"--build-option / --global-option. "
|
| 933 |
+
)
|
| 934 |
+
options.format_control.disallow_binaries()
|
vllm/lib/python3.10/site-packages/pip/_internal/req/req_set.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from collections import OrderedDict
|
| 3 |
+
from typing import Dict, List
|
| 4 |
+
|
| 5 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 6 |
+
|
| 7 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 8 |
+
|
| 9 |
+
logger = logging.getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class RequirementSet:
|
| 13 |
+
def __init__(self, check_supported_wheels: bool = True) -> None:
|
| 14 |
+
"""Create a RequirementSet."""
|
| 15 |
+
|
| 16 |
+
self.requirements: Dict[str, InstallRequirement] = OrderedDict()
|
| 17 |
+
self.check_supported_wheels = check_supported_wheels
|
| 18 |
+
|
| 19 |
+
self.unnamed_requirements: List[InstallRequirement] = []
|
| 20 |
+
|
| 21 |
+
def __str__(self) -> str:
|
| 22 |
+
requirements = sorted(
|
| 23 |
+
(req for req in self.requirements.values() if not req.comes_from),
|
| 24 |
+
key=lambda req: canonicalize_name(req.name or ""),
|
| 25 |
+
)
|
| 26 |
+
return " ".join(str(req.req) for req in requirements)
|
| 27 |
+
|
| 28 |
+
def __repr__(self) -> str:
|
| 29 |
+
requirements = sorted(
|
| 30 |
+
self.requirements.values(),
|
| 31 |
+
key=lambda req: canonicalize_name(req.name or ""),
|
| 32 |
+
)
|
| 33 |
+
|
| 34 |
+
format_string = "<{classname} object; {count} requirement(s): {reqs}>"
|
| 35 |
+
return format_string.format(
|
| 36 |
+
classname=self.__class__.__name__,
|
| 37 |
+
count=len(requirements),
|
| 38 |
+
reqs=", ".join(str(req.req) for req in requirements),
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
def add_unnamed_requirement(self, install_req: InstallRequirement) -> None:
|
| 42 |
+
assert not install_req.name
|
| 43 |
+
self.unnamed_requirements.append(install_req)
|
| 44 |
+
|
| 45 |
+
def add_named_requirement(self, install_req: InstallRequirement) -> None:
|
| 46 |
+
assert install_req.name
|
| 47 |
+
|
| 48 |
+
project_name = canonicalize_name(install_req.name)
|
| 49 |
+
self.requirements[project_name] = install_req
|
| 50 |
+
|
| 51 |
+
def has_requirement(self, name: str) -> bool:
|
| 52 |
+
project_name = canonicalize_name(name)
|
| 53 |
+
|
| 54 |
+
return (
|
| 55 |
+
project_name in self.requirements
|
| 56 |
+
and not self.requirements[project_name].constraint
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
def get_requirement(self, name: str) -> InstallRequirement:
|
| 60 |
+
project_name = canonicalize_name(name)
|
| 61 |
+
|
| 62 |
+
if project_name in self.requirements:
|
| 63 |
+
return self.requirements[project_name]
|
| 64 |
+
|
| 65 |
+
raise KeyError(f"No project with the name {name!r}")
|
| 66 |
+
|
| 67 |
+
@property
|
| 68 |
+
def all_requirements(self) -> List[InstallRequirement]:
|
| 69 |
+
return self.unnamed_requirements + list(self.requirements.values())
|
| 70 |
+
|
| 71 |
+
@property
|
| 72 |
+
def requirements_to_install(self) -> List[InstallRequirement]:
|
| 73 |
+
"""Return the list of requirements that need to be installed.
|
| 74 |
+
|
| 75 |
+
TODO remove this property together with the legacy resolver, since the new
|
| 76 |
+
resolver only returns requirements that need to be installed.
|
| 77 |
+
"""
|
| 78 |
+
return [
|
| 79 |
+
install_req
|
| 80 |
+
for install_req in self.all_requirements
|
| 81 |
+
if not install_req.constraint and not install_req.satisfied_by
|
| 82 |
+
]
|
vllm/lib/python3.10/site-packages/pip/_internal/req/req_uninstall.py
ADDED
|
@@ -0,0 +1,633 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import os
|
| 3 |
+
import sys
|
| 4 |
+
import sysconfig
|
| 5 |
+
from importlib.util import cache_from_source
|
| 6 |
+
from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, Set, Tuple
|
| 7 |
+
|
| 8 |
+
from pip._internal.exceptions import LegacyDistutilsInstall, UninstallMissingRecord
|
| 9 |
+
from pip._internal.locations import get_bin_prefix, get_bin_user
|
| 10 |
+
from pip._internal.metadata import BaseDistribution
|
| 11 |
+
from pip._internal.utils.compat import WINDOWS
|
| 12 |
+
from pip._internal.utils.egg_link import egg_link_path_from_location
|
| 13 |
+
from pip._internal.utils.logging import getLogger, indent_log
|
| 14 |
+
from pip._internal.utils.misc import ask, normalize_path, renames, rmtree
|
| 15 |
+
from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
|
| 16 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
| 17 |
+
|
| 18 |
+
logger = getLogger(__name__)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def _script_names(
|
| 22 |
+
bin_dir: str, script_name: str, is_gui: bool
|
| 23 |
+
) -> Generator[str, None, None]:
|
| 24 |
+
"""Create the fully qualified name of the files created by
|
| 25 |
+
{console,gui}_scripts for the given ``dist``.
|
| 26 |
+
Returns the list of file names
|
| 27 |
+
"""
|
| 28 |
+
exe_name = os.path.join(bin_dir, script_name)
|
| 29 |
+
yield exe_name
|
| 30 |
+
if not WINDOWS:
|
| 31 |
+
return
|
| 32 |
+
yield f"{exe_name}.exe"
|
| 33 |
+
yield f"{exe_name}.exe.manifest"
|
| 34 |
+
if is_gui:
|
| 35 |
+
yield f"{exe_name}-script.pyw"
|
| 36 |
+
else:
|
| 37 |
+
yield f"{exe_name}-script.py"
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def _unique(
|
| 41 |
+
fn: Callable[..., Generator[Any, None, None]]
|
| 42 |
+
) -> Callable[..., Generator[Any, None, None]]:
|
| 43 |
+
@functools.wraps(fn)
|
| 44 |
+
def unique(*args: Any, **kw: Any) -> Generator[Any, None, None]:
|
| 45 |
+
seen: Set[Any] = set()
|
| 46 |
+
for item in fn(*args, **kw):
|
| 47 |
+
if item not in seen:
|
| 48 |
+
seen.add(item)
|
| 49 |
+
yield item
|
| 50 |
+
|
| 51 |
+
return unique
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
@_unique
|
| 55 |
+
def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]:
|
| 56 |
+
"""
|
| 57 |
+
Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
|
| 58 |
+
|
| 59 |
+
Yield paths to all the files in RECORD. For each .py file in RECORD, add
|
| 60 |
+
the .pyc and .pyo in the same directory.
|
| 61 |
+
|
| 62 |
+
UninstallPathSet.add() takes care of the __pycache__ .py[co].
|
| 63 |
+
|
| 64 |
+
If RECORD is not found, raises an error,
|
| 65 |
+
with possible information from the INSTALLER file.
|
| 66 |
+
|
| 67 |
+
https://packaging.python.org/specifications/recording-installed-packages/
|
| 68 |
+
"""
|
| 69 |
+
location = dist.location
|
| 70 |
+
assert location is not None, "not installed"
|
| 71 |
+
|
| 72 |
+
entries = dist.iter_declared_entries()
|
| 73 |
+
if entries is None:
|
| 74 |
+
raise UninstallMissingRecord(distribution=dist)
|
| 75 |
+
|
| 76 |
+
for entry in entries:
|
| 77 |
+
path = os.path.join(location, entry)
|
| 78 |
+
yield path
|
| 79 |
+
if path.endswith(".py"):
|
| 80 |
+
dn, fn = os.path.split(path)
|
| 81 |
+
base = fn[:-3]
|
| 82 |
+
path = os.path.join(dn, base + ".pyc")
|
| 83 |
+
yield path
|
| 84 |
+
path = os.path.join(dn, base + ".pyo")
|
| 85 |
+
yield path
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def compact(paths: Iterable[str]) -> Set[str]:
|
| 89 |
+
"""Compact a path set to contain the minimal number of paths
|
| 90 |
+
necessary to contain all paths in the set. If /a/path/ and
|
| 91 |
+
/a/path/to/a/file.txt are both in the set, leave only the
|
| 92 |
+
shorter path."""
|
| 93 |
+
|
| 94 |
+
sep = os.path.sep
|
| 95 |
+
short_paths: Set[str] = set()
|
| 96 |
+
for path in sorted(paths, key=len):
|
| 97 |
+
should_skip = any(
|
| 98 |
+
path.startswith(shortpath.rstrip("*"))
|
| 99 |
+
and path[len(shortpath.rstrip("*").rstrip(sep))] == sep
|
| 100 |
+
for shortpath in short_paths
|
| 101 |
+
)
|
| 102 |
+
if not should_skip:
|
| 103 |
+
short_paths.add(path)
|
| 104 |
+
return short_paths
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def compress_for_rename(paths: Iterable[str]) -> Set[str]:
|
| 108 |
+
"""Returns a set containing the paths that need to be renamed.
|
| 109 |
+
|
| 110 |
+
This set may include directories when the original sequence of paths
|
| 111 |
+
included every file on disk.
|
| 112 |
+
"""
|
| 113 |
+
case_map = {os.path.normcase(p): p for p in paths}
|
| 114 |
+
remaining = set(case_map)
|
| 115 |
+
unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len)
|
| 116 |
+
wildcards: Set[str] = set()
|
| 117 |
+
|
| 118 |
+
def norm_join(*a: str) -> str:
|
| 119 |
+
return os.path.normcase(os.path.join(*a))
|
| 120 |
+
|
| 121 |
+
for root in unchecked:
|
| 122 |
+
if any(os.path.normcase(root).startswith(w) for w in wildcards):
|
| 123 |
+
# This directory has already been handled.
|
| 124 |
+
continue
|
| 125 |
+
|
| 126 |
+
all_files: Set[str] = set()
|
| 127 |
+
all_subdirs: Set[str] = set()
|
| 128 |
+
for dirname, subdirs, files in os.walk(root):
|
| 129 |
+
all_subdirs.update(norm_join(root, dirname, d) for d in subdirs)
|
| 130 |
+
all_files.update(norm_join(root, dirname, f) for f in files)
|
| 131 |
+
# If all the files we found are in our remaining set of files to
|
| 132 |
+
# remove, then remove them from the latter set and add a wildcard
|
| 133 |
+
# for the directory.
|
| 134 |
+
if not (all_files - remaining):
|
| 135 |
+
remaining.difference_update(all_files)
|
| 136 |
+
wildcards.add(root + os.sep)
|
| 137 |
+
|
| 138 |
+
return set(map(case_map.__getitem__, remaining)) | wildcards
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str]]:
|
| 142 |
+
"""Returns a tuple of 2 sets of which paths to display to user
|
| 143 |
+
|
| 144 |
+
The first set contains paths that would be deleted. Files of a package
|
| 145 |
+
are not added and the top-level directory of the package has a '*' added
|
| 146 |
+
at the end - to signify that all it's contents are removed.
|
| 147 |
+
|
| 148 |
+
The second set contains files that would have been skipped in the above
|
| 149 |
+
folders.
|
| 150 |
+
"""
|
| 151 |
+
|
| 152 |
+
will_remove = set(paths)
|
| 153 |
+
will_skip = set()
|
| 154 |
+
|
| 155 |
+
# Determine folders and files
|
| 156 |
+
folders = set()
|
| 157 |
+
files = set()
|
| 158 |
+
for path in will_remove:
|
| 159 |
+
if path.endswith(".pyc"):
|
| 160 |
+
continue
|
| 161 |
+
if path.endswith("__init__.py") or ".dist-info" in path:
|
| 162 |
+
folders.add(os.path.dirname(path))
|
| 163 |
+
files.add(path)
|
| 164 |
+
|
| 165 |
+
_normcased_files = set(map(os.path.normcase, files))
|
| 166 |
+
|
| 167 |
+
folders = compact(folders)
|
| 168 |
+
|
| 169 |
+
# This walks the tree using os.walk to not miss extra folders
|
| 170 |
+
# that might get added.
|
| 171 |
+
for folder in folders:
|
| 172 |
+
for dirpath, _, dirfiles in os.walk(folder):
|
| 173 |
+
for fname in dirfiles:
|
| 174 |
+
if fname.endswith(".pyc"):
|
| 175 |
+
continue
|
| 176 |
+
|
| 177 |
+
file_ = os.path.join(dirpath, fname)
|
| 178 |
+
if (
|
| 179 |
+
os.path.isfile(file_)
|
| 180 |
+
and os.path.normcase(file_) not in _normcased_files
|
| 181 |
+
):
|
| 182 |
+
# We are skipping this file. Add it to the set.
|
| 183 |
+
will_skip.add(file_)
|
| 184 |
+
|
| 185 |
+
will_remove = files | {os.path.join(folder, "*") for folder in folders}
|
| 186 |
+
|
| 187 |
+
return will_remove, will_skip
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
class StashedUninstallPathSet:
    """A set of file rename operations to stash files while
    tentatively uninstalling them."""

    def __init__(self) -> None:
        # Mapping from source file root to [Adjacent]TempDirectory
        # for files under that directory.
        self._save_dirs: Dict[str, TempDirectory] = {}
        # (old path, new path) tuples for each move that may need
        # to be undone.
        self._moves: List[Tuple[str, str]] = []

    def _get_directory_stash(self, path: str) -> str:
        """Stashes a directory.

        Directories are stashed adjacent to their original location if
        possible, or else moved/copied into the user's temp dir."""

        try:
            # Prefer a sibling temp dir so the eventual rename stays on
            # the same filesystem (cheap) where possible.
            save_dir: TempDirectory = AdjacentTempDirectory(path)
        except OSError:
            save_dir = TempDirectory(kind="uninstall")
        # Key by normcased path so later lookups are case-insensitive on
        # case-insensitive filesystems.
        self._save_dirs[os.path.normcase(path)] = save_dir

        return save_dir.path

    def _get_file_stash(self, path: str) -> str:
        """Stashes a file.

        If no root has been provided, one will be created for the directory
        in the user's temp directory."""
        path = os.path.normcase(path)
        head, old_head = os.path.dirname(path), None
        save_dir = None

        # Walk up the directory tree looking for an ancestor that already
        # has a stash directory registered.  The loop terminates when
        # os.path.dirname stops changing (filesystem root reached).
        while head != old_head:
            try:
                save_dir = self._save_dirs[head]
                break
            except KeyError:
                pass
            head, old_head = os.path.dirname(head), head
        else:
            # Did not find any suitable root
            head = os.path.dirname(path)
            save_dir = TempDirectory(kind="uninstall")
            self._save_dirs[head] = save_dir

        # Mirror the file's position relative to the found root inside the
        # stash directory.
        relpath = os.path.relpath(path, head)
        if relpath and relpath != os.path.curdir:
            return os.path.join(save_dir.path, relpath)
        return save_dir.path

    def stash(self, path: str) -> str:
        """Stashes the directory or file and returns its new location.
        Handle symlinks as files to avoid modifying the symlink targets.
        """
        path_is_dir = os.path.isdir(path) and not os.path.islink(path)
        if path_is_dir:
            new_path = self._get_directory_stash(path)
        else:
            new_path = self._get_file_stash(path)

        # Record the move first so rollback() can undo it even if the
        # rename below partially fails.
        self._moves.append((path, new_path))
        if path_is_dir and os.path.isdir(new_path):
            # If we're moving a directory, we need to
            # remove the destination first or else it will be
            # moved to inside the existing directory.
            # We just created new_path ourselves, so it will
            # be removable.
            os.rmdir(new_path)
        renames(path, new_path)
        return new_path

    def commit(self) -> None:
        """Commits the uninstall by removing stashed files."""
        for save_dir in self._save_dirs.values():
            save_dir.cleanup()
        # After cleanup nothing can be rolled back any more.
        self._moves = []
        self._save_dirs = {}

    def rollback(self) -> None:
        """Undoes the uninstall by moving stashed files back."""
        # First log every pending restore (old original path, stash path).
        for p in self._moves:
            logger.info("Moving to %s\n from %s", *p)

        # Each tuple is (original path, stashed path); restoring means
        # renaming the stashed copy back over the original location.
        for new_path, path in self._moves:
            try:
                logger.debug("Replacing %s from %s", new_path, path)
                if os.path.isfile(new_path) or os.path.islink(new_path):
                    os.unlink(new_path)
                elif os.path.isdir(new_path):
                    rmtree(new_path)
                renames(path, new_path)
            except OSError as ex:
                # Best-effort restore: log and continue with the rest.
                logger.error("Failed to restore %s", new_path)
                logger.debug("Exception: %s", ex)

        # Drop the (now re-applied) stash directories and move records.
        self.commit()

    @property
    def can_rollback(self) -> bool:
        # True while there are recorded moves that have not been committed.
        return bool(self._moves)
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
class UninstallPathSet:
    """A set of file paths to be removed in the uninstallation of a
    requirement."""

    def __init__(self, dist: BaseDistribution) -> None:
        # Paths accepted for removal by add().
        self._paths: Set[str] = set()
        # Paths refused because they fall outside the permitted prefix.
        self._refuse: Set[str] = set()
        # Per-.pth-file entry removers, keyed by normalized pth path.
        self._pth: Dict[str, UninstallPthEntries] = {}
        self._dist = dist
        self._moved_paths = StashedUninstallPathSet()
        # Create local cache of normalize_path results. Creating an UninstallPathSet
        # can result in hundreds/thousands of redundant calls to normalize_path with
        # the same args, which hurts performance.
        self._normalize_path_cached = functools.lru_cache(normalize_path)

    def _permitted(self, path: str) -> bool:
        """
        Return True if the given path is one we are permitted to
        remove/modify, False otherwise.

        """
        # aka is_local, but caching normalized sys.prefix
        if not running_under_virtualenv():
            return True
        return path.startswith(self._normalize_path_cached(sys.prefix))

    def add(self, path: str) -> None:
        """Add *path* to the set if it exists and is permitted to be removed."""
        head, tail = os.path.split(path)

        # we normalize the head to resolve parent directory symlinks, but not
        # the tail, since we only want to uninstall symlinks, not their targets
        path = os.path.join(self._normalize_path_cached(head), os.path.normcase(tail))

        if not os.path.exists(path):
            return
        if self._permitted(path):
            self._paths.add(path)
        else:
            self._refuse.add(path)

        # __pycache__ files can show up after 'installed-files.txt' is created,
        # due to imports
        if os.path.splitext(path)[1] == ".py":
            self.add(cache_from_source(path))

    def add_pth(self, pth_file: str, entry: str) -> None:
        """Register *entry* for removal from the .pth file *pth_file*."""
        pth_file = self._normalize_path_cached(pth_file)
        if self._permitted(pth_file):
            if pth_file not in self._pth:
                self._pth[pth_file] = UninstallPthEntries(pth_file)
            self._pth[pth_file].add(entry)
        else:
            self._refuse.add(pth_file)

    def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None:
        """Remove paths in ``self._paths`` with confirmation (unless
        ``auto_confirm`` is True)."""

        if not self._paths:
            logger.info(
                "Can't uninstall '%s'. No files were found to uninstall.",
                self._dist.raw_name,
            )
            return

        dist_name_version = f"{self._dist.raw_name}-{self._dist.raw_version}"
        logger.info("Uninstalling %s:", dist_name_version)

        with indent_log():
            if auto_confirm or self._allowed_to_proceed(verbose):
                moved = self._moved_paths

                # Collapse to the minimal set of paths to rename (stashing
                # a directory covers everything beneath it).
                for_rename = compress_for_rename(self._paths)

                for path in sorted(compact(for_rename)):
                    moved.stash(path)
                    logger.verbose("Removing file or directory %s", path)

                for pth in self._pth.values():
                    pth.remove()

                logger.info("Successfully uninstalled %s", dist_name_version)

    def _allowed_to_proceed(self, verbose: bool) -> bool:
        """Display which files would be deleted and prompt for confirmation"""

        def _display(msg: str, paths: Iterable[str]) -> None:
            # Skip the heading entirely when there is nothing to list.
            if not paths:
                return

            logger.info(msg)
            with indent_log():
                for path in sorted(compact(paths)):
                    logger.info(path)

        if not verbose:
            will_remove, will_skip = compress_for_output_listing(self._paths)
        else:
            # In verbose mode, display all the files that are going to be
            # deleted.
            will_remove = set(self._paths)
            will_skip = set()

        _display("Would remove:", will_remove)
        _display("Would not remove (might be manually added):", will_skip)
        _display("Would not remove (outside of prefix):", self._refuse)
        if verbose:
            _display("Will actually move:", compress_for_rename(self._paths))

        # Empty answer (just Enter) counts as yes.
        return ask("Proceed (Y/n)? ", ("y", "n", "")) != "n"

    def rollback(self) -> None:
        """Rollback the changes previously made by remove()."""
        if not self._moved_paths.can_rollback:
            logger.error(
                "Can't roll back %s; was not uninstalled",
                self._dist.raw_name,
            )
            return
        logger.info("Rolling back uninstall of %s", self._dist.raw_name)
        self._moved_paths.rollback()
        for pth in self._pth.values():
            pth.rollback()

    def commit(self) -> None:
        """Remove temporary save dir: rollback will no longer be possible."""
        self._moved_paths.commit()

    @classmethod
    def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet":
        """Build an UninstallPathSet for *dist* by inspecting how it was
        installed (dist-info, flat egg-info, egg, legacy editable, ...).

        Returns an empty set (with an explanatory log message) when the
        distribution should not be uninstalled at all."""
        dist_location = dist.location
        info_location = dist.info_location
        if dist_location is None:
            logger.info(
                "Not uninstalling %s since it is not installed",
                dist.canonical_name,
            )
            return cls(dist)

        normalized_dist_location = normalize_path(dist_location)
        if not dist.local:
            logger.info(
                "Not uninstalling %s at %s, outside environment %s",
                dist.canonical_name,
                normalized_dist_location,
                sys.prefix,
            )
            return cls(dist)

        # Never touch anything installed into the interpreter's stdlib dirs.
        if normalized_dist_location in {
            p
            for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")}
            if p
        }:
            logger.info(
                "Not uninstalling %s at %s, as it is in the standard library.",
                dist.canonical_name,
                normalized_dist_location,
            )
            return cls(dist)

        paths_to_remove = cls(dist)
        develop_egg_link = egg_link_path_from_location(dist.raw_name)

        # Distribution is installed with metadata in a "flat" .egg-info
        # directory. This means it is not a modern .dist-info installation, an
        # egg, or legacy editable.
        setuptools_flat_installation = (
            dist.installed_with_setuptools_egg_info
            and info_location is not None
            and os.path.exists(info_location)
            # If dist is editable and the location points to a ``.egg-info``,
            # we are in fact in the legacy editable case.
            and not info_location.endswith(f"{dist.setuptools_filename}.egg-info")
        )

        # Uninstall cases order do matter as in the case of 2 installs of the
        # same package, pip needs to uninstall the currently detected version
        if setuptools_flat_installation:
            if info_location is not None:
                paths_to_remove.add(info_location)
            installed_files = dist.iter_declared_entries()
            if installed_files is not None:
                for installed_file in installed_files:
                    paths_to_remove.add(os.path.join(dist_location, installed_file))
        # FIXME: need a test for this elif block
        # occurs with --single-version-externally-managed/--record outside
        # of pip
        elif dist.is_file("top_level.txt"):
            try:
                namespace_packages = dist.read_text("namespace_packages.txt")
            except FileNotFoundError:
                namespaces = []
            else:
                namespaces = namespace_packages.splitlines(keepends=False)
            # Remove each declared top-level package/module (plus its
            # compiled variants), excluding namespace packages.
            for top_level_pkg in [
                p
                for p in dist.read_text("top_level.txt").splitlines()
                if p and p not in namespaces
            ]:
                path = os.path.join(dist_location, top_level_pkg)
                paths_to_remove.add(path)
                paths_to_remove.add(f"{path}.py")
                paths_to_remove.add(f"{path}.pyc")
                paths_to_remove.add(f"{path}.pyo")

        elif dist.installed_by_distutils:
            raise LegacyDistutilsInstall(distribution=dist)

        elif dist.installed_as_egg:
            # package installed by easy_install
            # We cannot match on dist.egg_name because it can slightly vary
            # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
            paths_to_remove.add(dist_location)
            easy_install_egg = os.path.split(dist_location)[1]
            easy_install_pth = os.path.join(
                os.path.dirname(dist_location),
                "easy-install.pth",
            )
            paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)

        elif dist.installed_with_dist_info:
            for path in uninstallation_paths(dist):
                paths_to_remove.add(path)

        elif develop_egg_link:
            # PEP 660 modern editable is handled in the ``.dist-info`` case
            # above, so this only covers the setuptools-style editable.
            with open(develop_egg_link) as fh:
                link_pointer = os.path.normcase(fh.readline().strip())
                normalized_link_pointer = paths_to_remove._normalize_path_cached(
                    link_pointer
                )
            assert os.path.samefile(
                normalized_link_pointer, normalized_dist_location
            ), (
                f"Egg-link {develop_egg_link} (to {link_pointer}) does not match "
                f"installed location of {dist.raw_name} (at {dist_location})"
            )
            paths_to_remove.add(develop_egg_link)
            easy_install_pth = os.path.join(
                os.path.dirname(develop_egg_link), "easy-install.pth"
            )
            paths_to_remove.add_pth(easy_install_pth, dist_location)

        else:
            logger.debug(
                "Not sure how to uninstall: %s - Check: %s",
                dist,
                dist_location,
            )

        if dist.in_usersite:
            bin_dir = get_bin_user()
        else:
            bin_dir = get_bin_prefix()

        # find distutils scripts= scripts
        try:
            for script in dist.iter_distutils_script_names():
                paths_to_remove.add(os.path.join(bin_dir, script))
                if WINDOWS:
                    paths_to_remove.add(os.path.join(bin_dir, f"{script}.bat"))
        except (FileNotFoundError, NotADirectoryError):
            # Best-effort: missing metadata directory just means there are
            # no distutils scripts to collect.
            pass

        # find console_scripts and gui_scripts
        def iter_scripts_to_remove(
            dist: BaseDistribution,
            bin_dir: str,
        ) -> Generator[str, None, None]:
            for entry_point in dist.iter_entry_points():
                if entry_point.group == "console_scripts":
                    yield from _script_names(bin_dir, entry_point.name, False)
                elif entry_point.group == "gui_scripts":
                    yield from _script_names(bin_dir, entry_point.name, True)

        for s in iter_scripts_to_remove(dist, bin_dir):
            paths_to_remove.add(s)

        return paths_to_remove
|
| 576 |
+
|
| 577 |
+
|
| 578 |
+
class UninstallPthEntries:
    """Removes (and can restore) a set of entries from a single .pth file."""

    def __init__(self, pth_file: str) -> None:
        # Path of the .pth file being edited.
        self.file = pth_file
        # Entries (normcased lines) scheduled for removal.
        self.entries: Set[str] = set()
        # Raw original file contents, captured by remove() for rollback().
        self._saved_lines: Optional[List[bytes]] = None

    def add(self, entry: str) -> None:
        """Schedule *entry* for removal, normalizing its path separators."""
        entry = os.path.normcase(entry)
        # On Windows, os.path.normcase converts the entry to use
        # backslashes. This is correct for entries that describe absolute
        # paths outside of site-packages, but all the others use forward
        # slashes.
        # os.path.splitdrive is used instead of os.path.isabs because isabs
        # treats non-absolute paths with drive letter markings like c:foo\bar
        # as absolute paths. It also does not recognize UNC paths if they don't
        # have more than "\\sever\share". Valid examples: "\\server\share\" or
        # "\\server\share\folder".
        if WINDOWS and not os.path.splitdrive(entry)[0]:
            entry = entry.replace("\\", "/")
        self.entries.add(entry)

    def remove(self) -> None:
        """Rewrite the .pth file with all scheduled entries removed.

        The original bytes are saved first so rollback() can restore them."""
        logger.verbose("Removing pth entries from %s:", self.file)

        # If the file doesn't exist, log a warning and return
        if not os.path.isfile(self.file):
            logger.warning("Cannot remove entries from nonexistent file %s", self.file)
            return
        # Work in binary mode so the original line endings are preserved
        # exactly when rewriting.
        with open(self.file, "rb") as fh:
            # windows uses '\r\n' with py3k, but uses '\n' with py2.x
            lines = fh.readlines()
            self._saved_lines = lines
        # Match whichever newline convention the file already uses.
        if any(b"\r\n" in line for line in lines):
            endline = "\r\n"
        else:
            endline = "\n"
        # handle missing trailing newline
        if lines and not lines[-1].endswith(endline.encode("utf-8")):
            lines[-1] = lines[-1] + endline.encode("utf-8")
        for entry in self.entries:
            try:
                logger.verbose("Removing entry: %s", entry)
                lines.remove((entry + endline).encode("utf-8"))
            except ValueError:
                # Entry not present in the file; nothing to remove.
                pass
        with open(self.file, "wb") as fh:
            fh.writelines(lines)

    def rollback(self) -> bool:
        """Restore the .pth file to its pre-remove() contents.

        Returns True on success, False if remove() was never called."""
        if self._saved_lines is None:
            logger.error("Cannot roll back changes to %s, none were made", self.file)
            return False
        logger.debug("Rolling %s back to previous state", self.file)
        with open(self.file, "wb") as fh:
            fh.writelines(self._saved_lines)
        return True
|
vllm/lib/python3.10/site-packages/pip/_internal/resolution/__init__.py
ADDED
|
File without changes
|