Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes; see the raw diff for the full change set.
- .gitattributes +2 -0
- evalkit_llava/bin/xz +3 -0
- evalkit_llava/lib/libncurses.so +3 -0
- evalkit_llava/lib/python3.10/site-packages/pip/__init__.py +13 -0
- evalkit_llava/lib/python3.10/site-packages/pip/__main__.py +24 -0
- evalkit_llava/lib/python3.10/site-packages/pip/__pip-runner__.py +50 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/__init__.py +18 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/build_env.py +322 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/cache.py +290 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/configuration.py +383 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/exceptions.py +809 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/main.py +12 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/pyproject.py +185 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/req/__init__.py +90 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_install.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/req/req_install.py +934 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/req/req_set.py +82 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/resolution/base.py +20 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/base.py +139 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/candidates.py +574 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/reporter.py +81 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/requirements.py +245 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/resolver.py +317 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/self_outdated_check.py +252 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/__init__.py +15 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/git.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/bazaar.py +112 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/git.py +527 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/mercurial.py +163 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/subversion.py +324 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/versioncontrol.py +688 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/wheel_builder.py +354 -0
- evalkit_llava/lib/python3.10/site-packages/pip/py.typed +4 -0
- evalkit_llava/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/entry_points.txt +51 -0
- evalkit_llava/lib/python3.10/site-packages/wheel-0.45.1.dist-info/WHEEL +4 -0
- evalkit_llava/lib/python3.10/site-packages/wheel/__main__.py +23 -0
- evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/__main__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/_bdist_wheel.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/_setuptools_logging.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/bdist_wheel.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/macosx_libfile.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/metadata.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/util.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/wheelfile.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/wheel/cli/__init__.py +155 -0
.gitattributes
CHANGED
|
@@ -58,3 +58,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 58 |
*.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 59 |
*.webm filter=lfs diff=lfs merge=lfs -text
|
| 60 |
evalkit_llava/bin/bzip2 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
| 58 |
*.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 59 |
*.webm filter=lfs diff=lfs merge=lfs -text
|
| 60 |
evalkit_llava/bin/bzip2 filter=lfs diff=lfs merge=lfs -text
|
| 61 |
+
evalkit_llava/bin/xz filter=lfs diff=lfs merge=lfs -text
|
| 62 |
+
evalkit_llava/lib/libncurses.so filter=lfs diff=lfs merge=lfs -text
|
evalkit_llava/bin/xz
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:5cc86d36933372b94af4bd9ed22ad711f57b4e16175675627edcd4cb9ea46a61
|
| 3 |
+
size 108336
|
evalkit_llava/lib/libncurses.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:7fa4e5e93804d78660b0eef727cdb4211209e1742e4ad3669348022668d90962
|
| 3 |
+
size 271304
|
evalkit_llava/lib/python3.10/site-packages/pip/__init__.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import List, Optional

__version__ = "25.0.1"


def main(args: Optional[List[str]] = None) -> int:
    """This is an internal API only meant for use by pip's own console scripts.

    For additional details, see https://github.com/pypa/pip/issues/7498.
    """
    # Imported lazily so that `import pip` stays cheap and side-effect free.
    from pip._internal.utils.entrypoints import _wrapper

    exit_code = _wrapper(args)
    return exit_code
evalkit_llava/lib/python3.10/site-packages/pip/__main__.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import os
import sys

# Drop '' / the current working directory from the head of sys.path, so that
# `python -m pip <command>` (check, freeze, install, list, show) never picks
# up packages from the current directory.
if sys.path[0] in ("", os.getcwd()):
    del sys.path[0]

# If we are running from inside a wheel (python pip-*.whl/pip install ...),
# __package__ is empty; add the wheel itself to sys.path so pip is importable.
if __package__ == "":
    # __file__ is pip-*.whl/pip/__main__.py; stripping two path components
    # (first '/__main__.py', then '/pip') yields the wheel path itself.
    wheel_path = os.path.dirname(os.path.dirname(__file__))
    sys.path.insert(0, wheel_path)

if __name__ == "__main__":
    from pip._internal.cli.main import main as _main

    sys.exit(_main())
evalkit_llava/lib/python3.10/site-packages/pip/__pip-runner__.py
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Execute exactly this copy of pip, within a different environment.

This file is named as it is, to ensure that this module can't be imported via
an import statement.
"""

# /!\ This version compatibility check section must be Python 2 compatible. /!\

import sys

# Copied from pyproject.toml
PYTHON_REQUIRES = (3, 8)


def version_str(version):  # type: ignore
    # Render a version tuple like (3, 8) as "3.8" for the error message.
    return ".".join(str(v) for v in version)


if sys.version_info[:2] < PYTHON_REQUIRES:
    raise SystemExit(
        "This version of pip does not support python {} (requires >={}).".format(
            version_str(sys.version_info[:2]), version_str(PYTHON_REQUIRES)
        )
    )

# From here on, we can use Python 3 features, but the syntax must remain
# Python 2 compatible.

import runpy  # noqa: E402
from importlib.machinery import PathFinder  # noqa: E402
from os.path import dirname  # noqa: E402

# Directory containing the `pip` package: the parent of this file's directory.
PIP_SOURCES_ROOT = dirname(dirname(__file__))


class PipImportRedirectingFinder:
    # Meta-path finder that forces `import pip` to resolve to this copy of
    # pip, regardless of whatever else is on sys.path.
    @classmethod
    def find_spec(self, fullname, path=None, target=None):  # type: ignore
        # NOTE(review): the first parameter of this classmethod is named
        # `self` — an upstream quirk; it actually receives the class object.
        if fullname != "pip":
            return None

        spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target)
        assert spec, (PIP_SOURCES_ROOT, fullname)
        return spec


sys.meta_path.insert(0, PipImportRedirectingFinder())

assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module"
runpy.run_module("pip", run_name="__main__", alter_sys=True)
evalkit_llava/lib/python3.10/site-packages/pip/_internal/__init__.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from typing import List, Optional

from pip._internal.utils import _log

# init_logging() must be called before any call to logging.getLogger()
# which happens at import of most modules.
_log.init_logging()


def main(args: Optional[List[str]] = None) -> int:
    """This is preserved for old console scripts that may still be referencing
    it.

    For additional details, see https://github.com/pypa/pip/issues/7498.
    """
    # Imported lazily so the (deprecated) entry point stays cheap to import.
    from pip._internal.utils.entrypoints import _wrapper

    return _wrapper(args)
evalkit_llava/lib/python3.10/site-packages/pip/_internal/build_env.py
ADDED
|
@@ -0,0 +1,322 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Build Environment used for isolation during sdist building
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
import os
|
| 6 |
+
import pathlib
|
| 7 |
+
import site
|
| 8 |
+
import sys
|
| 9 |
+
import textwrap
|
| 10 |
+
from collections import OrderedDict
|
| 11 |
+
from types import TracebackType
|
| 12 |
+
from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
|
| 13 |
+
|
| 14 |
+
from pip._vendor.packaging.version import Version
|
| 15 |
+
|
| 16 |
+
from pip import __file__ as pip_location
|
| 17 |
+
from pip._internal.cli.spinners import open_spinner
|
| 18 |
+
from pip._internal.locations import get_platlib, get_purelib, get_scheme
|
| 19 |
+
from pip._internal.metadata import get_default_environment, get_environment
|
| 20 |
+
from pip._internal.utils.logging import VERBOSE
|
| 21 |
+
from pip._internal.utils.packaging import get_requirement
|
| 22 |
+
from pip._internal.utils.subprocess import call_subprocess
|
| 23 |
+
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
| 24 |
+
|
| 25 |
+
if TYPE_CHECKING:
|
| 26 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 27 |
+
|
| 28 |
+
logger = logging.getLogger(__name__)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
|
| 32 |
+
return (a, b) if a != b else (a,)
|
| 33 |
+
|
| 34 |
+
|
class _Prefix:
    # One installation prefix inside the isolated build environment
    # (BuildEnvironment creates two: "normal" and "overlay").
    def __init__(self, path: str) -> None:
        self.path = path
        # Flipped to True once requirements have been installed here.
        self.setup = False
        # Resolve the install scheme rooted at `path`; "" = no dist name.
        scheme = get_scheme("", prefix=path)
        self.bin_dir = scheme.scripts
        # purelib and platlib may be the same directory; _dedup collapses
        # them to a single entry in that case.
        self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)
|
| 43 |
+
|
def get_runnable_pip() -> str:
    """Get a file to pass to a Python executable, to run the currently-running pip.

    This is used to run a pip subprocess, for installing requirements into the
    build environment.
    """
    source = pathlib.Path(pip_location).resolve().parent

    if source.is_dir():
        return os.fsdecode(source / "__pip-runner__.py")

    # pip is being imported from inside a zip archive (e.g. a wheel); the
    # archive path itself can be passed to the interpreter directly.
    return str(source)
| 60 |
+
def _get_system_sitepackages() -> Set[str]:
|
| 61 |
+
"""Get system site packages
|
| 62 |
+
|
| 63 |
+
Usually from site.getsitepackages,
|
| 64 |
+
but fallback on `get_purelib()/get_platlib()` if unavailable
|
| 65 |
+
(e.g. in a virtualenv created by virtualenv<20)
|
| 66 |
+
|
| 67 |
+
Returns normalized set of strings.
|
| 68 |
+
"""
|
| 69 |
+
if hasattr(site, "getsitepackages"):
|
| 70 |
+
system_sites = site.getsitepackages()
|
| 71 |
+
else:
|
| 72 |
+
# virtualenv < 20 overwrites site.py without getsitepackages
|
| 73 |
+
# fallback on get_purelib/get_platlib.
|
| 74 |
+
# this is known to miss things, but shouldn't in the cases
|
| 75 |
+
# where getsitepackages() has been removed (inside a virtualenv)
|
| 76 |
+
system_sites = [get_purelib(), get_platlib()]
|
| 77 |
+
return {os.path.normcase(path) for path in system_sites}
|
| 78 |
+
|
| 79 |
+
|
class BuildEnvironment:
    """Creates and manages an isolated environment to install build deps"""

    def __init__(self) -> None:
        temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)

        # Two prefixes: "normal" for ordinary build requirements and
        # "overlay", which shadows "normal" on PATH / sys.path.
        self._prefixes = OrderedDict(
            (name, _Prefix(os.path.join(temp_dir.path, name)))
            for name in ("normal", "overlay")
        )

        self._bin_dirs: List[str] = []
        self._lib_dirs: List[str] = []
        # reversed() puts "overlay" first so its dirs take precedence.
        for prefix in reversed(list(self._prefixes.values())):
            self._bin_dirs.append(prefix.bin_dir)
            self._lib_dirs.extend(prefix.lib_dirs)

        # Customize site to:
        # - ensure .pth files are honored
        # - prevent access to system site packages
        system_sites = _get_system_sitepackages()

        # Write a sitecustomize.py that the subprocess picks up via
        # PYTHONPATH (set in __enter__); it strips the system
        # site-packages from sys.path and addsitedir()s our lib dirs.
        self._site_dir = os.path.join(temp_dir.path, "site")
        if not os.path.exists(self._site_dir):
            os.mkdir(self._site_dir)
        with open(
            os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
        ) as fp:
            fp.write(
                textwrap.dedent(
                    """
                import os, site, sys

                # First, drop system-sites related paths.
                original_sys_path = sys.path[:]
                known_paths = set()
                for path in {system_sites!r}:
                    site.addsitedir(path, known_paths=known_paths)
                system_paths = set(
                    os.path.normcase(path)
                    for path in sys.path[len(original_sys_path):]
                )
                original_sys_path = [
                    path for path in original_sys_path
                    if os.path.normcase(path) not in system_paths
                ]
                sys.path = original_sys_path

                # Second, add lib directories.
                # ensuring .pth file are processed.
                for path in {lib_dirs!r}:
                    assert not path in sys.path
                    site.addsitedir(path)
                """
                ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
            )

    def __enter__(self) -> None:
        # Snapshot the env vars we mutate so __exit__ can restore them
        # exactly (None means "was unset").
        self._save_env = {
            name: os.environ.get(name, None)
            for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
        }

        # Prepend our bin dirs to the existing PATH.
        path = self._bin_dirs[:]
        old_path = self._save_env["PATH"]
        if old_path:
            path.extend(old_path.split(os.pathsep))

        # Only the custom `site` dir goes on PYTHONPATH; its
        # sitecustomize.py pulls in the lib dirs and drops system sites.
        pythonpath = [self._site_dir]

        os.environ.update(
            {
                "PATH": os.pathsep.join(path),
                "PYTHONNOUSERSITE": "1",
                "PYTHONPATH": os.pathsep.join(pythonpath),
            }
        )

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Restore (or remove) every env var touched by __enter__.
        for varname, old_value in self._save_env.items():
            if old_value is None:
                os.environ.pop(varname, None)
            else:
                os.environ[varname] = old_value

    def check_requirements(
        self, reqs: Iterable[str]
    ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
        """Return 2 sets:
        - conflicting requirements: set of (installed, wanted) reqs tuples
        - missing requirements: set of reqs
        """
        missing = set()
        conflicting = set()
        if reqs:
            # Inspect our own lib dirs when they exist; otherwise fall
            # back to the default (running) environment.
            env = (
                get_environment(self._lib_dirs)
                if hasattr(self, "_lib_dirs")
                else get_default_environment()
            )
            for req_str in reqs:
                req = get_requirement(req_str)
                # We're explicitly evaluating with an empty extra value, since build
                # environments are not provided any mechanism to select specific extras.
                if req.marker is not None and not req.marker.evaluate({"extra": ""}):
                    continue
                dist = env.get_distribution(req.name)
                if not dist:
                    missing.add(req_str)
                    continue
                if isinstance(dist.version, Version):
                    installed_req_str = f"{req.name}=={dist.version}"
                else:
                    # Legacy/non-PEP 440 version: use arbitrary equality.
                    installed_req_str = f"{req.name}==={dist.version}"
                if not req.specifier.contains(dist.version, prereleases=True):
                    conflicting.add((installed_req_str, req_str))
                # FIXME: Consider direct URL?
        return conflicting, missing

    def install_requirements(
        self,
        finder: "PackageFinder",
        requirements: Iterable[str],
        prefix_as_string: str,
        *,
        kind: str,
    ) -> None:
        # Each prefix may only be set up once per build environment.
        prefix = self._prefixes[prefix_as_string]
        assert not prefix.setup
        prefix.setup = True
        if not requirements:
            return
        self._install_requirements(
            get_runnable_pip(),
            finder,
            requirements,
            prefix,
            kind=kind,
        )

    @staticmethod
    def _install_requirements(
        pip_runnable: str,
        finder: "PackageFinder",
        requirements: Iterable[str],
        prefix: _Prefix,
        *,
        kind: str,
    ) -> None:
        # Build the `pip install` command line for the isolated
        # subprocess, mirroring the relevant settings from `finder`.
        args: List[str] = [
            sys.executable,
            pip_runnable,
            "install",
            "--ignore-installed",
            "--no-user",
            "--prefix",
            prefix.path,
            "--no-warn-script-location",
            "--disable-pip-version-check",
            # The prefix specified two lines above, thus
            # target from config file or env var should be ignored
            "--target",
            "",
        ]
        # Propagate our verbosity level to the subprocess.
        if logger.getEffectiveLevel() <= logging.DEBUG:
            args.append("-vv")
        elif logger.getEffectiveLevel() <= VERBOSE:
            args.append("-v")
        for format_control in ("no_binary", "only_binary"):
            formats = getattr(finder.format_control, format_control)
            args.extend(
                (
                    "--" + format_control.replace("_", "-"),
                    ",".join(sorted(formats or {":none:"})),
                )
            )

        index_urls = finder.index_urls
        if index_urls:
            args.extend(["-i", index_urls[0]])
            for extra_index in index_urls[1:]:
                args.extend(["--extra-index-url", extra_index])
        else:
            args.append("--no-index")
        for link in finder.find_links:
            args.extend(["--find-links", link])

        if finder.proxy:
            args.extend(["--proxy", finder.proxy])
        for host in finder.trusted_hosts:
            args.extend(["--trusted-host", host])
        if finder.custom_cert:
            args.extend(["--cert", finder.custom_cert])
        if finder.client_cert:
            args.extend(["--client-cert", finder.client_cert])
        if finder.allow_all_prereleases:
            args.append("--pre")
        if finder.prefer_binary:
            args.append("--prefer-binary")
        # `--` prevents requirement strings from being parsed as options.
        args.append("--")
        args.extend(requirements)
        with open_spinner(f"Installing {kind}") as spinner:
            call_subprocess(
                args,
                command_desc=f"pip subprocess to install {kind}",
                spinner=spinner,
            )
class NoOpBuildEnvironment(BuildEnvironment):
    """A no-op drop-in replacement for BuildEnvironment"""

    def __init__(self) -> None:
        # Deliberately skip BuildEnvironment.__init__: no temp dirs,
        # no sitecustomize, no environment mutation.
        return

    def __enter__(self) -> None:
        return

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        return

    def cleanup(self) -> None:
        return

    def install_requirements(
        self,
        finder: "PackageFinder",
        requirements: Iterable[str],
        prefix_as_string: str,
        *,
        kind: str,
    ) -> None:
        raise NotImplementedError()
evalkit_llava/lib/python3.10/site-packages/pip/_internal/cache.py
ADDED
|
@@ -0,0 +1,290 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Cache Management
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import hashlib
|
| 5 |
+
import json
|
| 6 |
+
import logging
|
| 7 |
+
import os
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
from typing import Any, Dict, List, Optional
|
| 10 |
+
|
| 11 |
+
from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
|
| 12 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 13 |
+
|
| 14 |
+
from pip._internal.exceptions import InvalidWheelFilename
|
| 15 |
+
from pip._internal.models.direct_url import DirectUrl
|
| 16 |
+
from pip._internal.models.link import Link
|
| 17 |
+
from pip._internal.models.wheel import Wheel
|
| 18 |
+
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
| 19 |
+
from pip._internal.utils.urls import path_to_url
|
| 20 |
+
|
| 21 |
+
logger = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
ORIGIN_JSON_NAME = "origin.json"
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def _hash_dict(d: Dict[str, str]) -> str:
|
| 27 |
+
"""Return a stable sha224 of a dictionary."""
|
| 28 |
+
s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
|
| 29 |
+
return hashlib.sha224(s.encode("ascii")).hexdigest()
|
| 30 |
+
|
| 31 |
+
|
class Cache:
    """An abstract class - provides cache directories for data from links

    :param cache_dir: The root of the cache.
    """

    def __init__(self, cache_dir: str) -> None:
        super().__init__()
        # An empty cache_dir disables caching entirely (stored as None).
        assert not cache_dir or os.path.isabs(cache_dir)
        self.cache_dir = cache_dir or None

    def _get_cache_path_parts(self, link: Link) -> List[str]:
        """Get parts of part that must be os.path.joined with cache_dir"""

        # We want to generate an url to use as our cache key, we don't want to
        # just reuse the URL because it might have other items in the fragment
        # and we don't care about those.
        key_parts = {"url": link.url_without_fragment}
        if link.hash_name is not None and link.hash is not None:
            key_parts[link.hash_name] = link.hash
        if link.subdirectory_fragment:
            key_parts["subdirectory"] = link.subdirectory_fragment

        # Include interpreter name, major and minor version in cache key
        # to cope with ill-behaved sdists that build a different wheel
        # depending on the python version their setup.py is being run on,
        # and don't encode the difference in compatibility tags.
        # https://github.com/pypa/pip/issues/7296
        key_parts["interpreter_name"] = interpreter_name()
        key_parts["interpreter_version"] = interpreter_version()

        # Encode our key url with sha224, we'll use this because it has similar
        # security properties to sha256, but with a shorter total output (and
        # thus less secure). However the differences don't make a lot of
        # difference for our use case here.
        hashed = _hash_dict(key_parts)

        # We want to nest the directories some to prevent having a ton of top
        # level directories where we might run out of sub directories on some
        # FS.
        parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]

        return parts

    def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
        # Caching needs all three of: a cache dir, a package name, a link.
        can_not_cache = not self.cache_dir or not canonical_package_name or not link
        if can_not_cache:
            return []

        path = self.get_path_for_link(link)
        if os.path.isdir(path):
            # Each entry pairs a cached filename with its directory.
            return [(candidate, path) for candidate in os.listdir(path)]
        return []

    def get_path_for_link(self, link: Link) -> str:
        """Return a directory to store cached items in for link."""
        raise NotImplementedError()

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        """Returns a link to a cached item if it exists, otherwise returns the
        passed link.
        """
        raise NotImplementedError()
class SimpleWheelCache(Cache):
    """A cache of wheels for future installs."""

    def __init__(self, cache_dir: str) -> None:
        super().__init__(cache_dir)

    def get_path_for_link(self, link: Link) -> str:
        """Return a directory to store cached wheels for link

        Because there are M wheels for any one sdist, we provide a directory
        to cache them in, and then consult that directory when looking up
        cache hits.

        We only insert things into the cache if they have plausible version
        numbers, so that we don't contaminate the cache with things that were
        not unique. E.g. ./package might have dozens of installs done for it
        and build a version of 0.0...and if we built and cached a wheel, we'd
        end up using the same wheel even if the source has been edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        parts = self._get_cache_path_parts(link)
        assert self.cache_dir
        # Store wheels within the root cache_dir
        return os.path.join(self.cache_dir, "wheels", *parts)

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        # Candidate wheels compatible with this interpreter, ranked below.
        candidates = []

        if not package_name:
            return link

        canonical_package_name = canonicalize_name(package_name)
        for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
            try:
                wheel = Wheel(wheel_name)
            except InvalidWheelFilename:
                # Non-wheel files in the cache dir are silently skipped.
                continue
            if canonicalize_name(wheel.name) != canonical_package_name:
                logger.debug(
                    "Ignoring cached wheel %s for %s as it "
                    "does not match the expected distribution name %s.",
                    wheel_name,
                    link,
                    package_name,
                )
                continue
            if not wheel.supported(supported_tags):
                # Built for a different python/arch/etc
                continue
            # Rank by tag priority: the lower the support index, the more
            # preferred the wheel; min() below picks the best-ranked one.
            candidates.append(
                (
                    wheel.support_index_min(supported_tags),
                    wheel_name,
                    wheel_dir,
                )
            )

        if not candidates:
            return link

        _, wheel_name, wheel_dir = min(candidates)
        return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
| 172 |
+
class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache that creates its own temporary cache directory."""

    def __init__(self) -> None:
        # globally_managed=True hands cleanup of this directory over to
        # pip's global temp-directory management.
        self._temp_dir = TempDirectory(
            kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
            globally_managed=True,
        )
        super().__init__(self._temp_dir.path)
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
class CacheEntry:
    """A cache lookup result: the cached link, whether it came from the
    persistent cache, and — when a sidecar origin file exists — the
    recorded download origin."""

    def __init__(
        self,
        link: Link,
        persistent: bool,
    ):
        self.link = link
        self.persistent = persistent
        self.origin: Optional[DirectUrl] = None
        # A sidecar file (ORIGIN_JSON_NAME) next to the cached item may
        # record where it was originally downloaded from.
        origin_file = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
        if not origin_file.exists():
            return
        try:
            self.origin = DirectUrl.from_json(
                origin_file.read_text(encoding="utf-8")
            )
        except Exception as e:
            # A corrupt origin file is not fatal; the entry simply loses
            # its provenance information.
            logger.warning(
                "Ignoring invalid cache entry origin file %s for %s (%s)",
                origin_file,
                link.filename,
                e,
            )
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
class WheelCache(Cache):
    """Wraps EphemWheelCache and SimpleWheelCache into a single Cache.

    This Cache allows for graceful degradation, using the ephem wheel
    cache when a certain link is not found in the simple wheel cache first.
    """

    def __init__(self, cache_dir: str) -> None:
        super().__init__(cache_dir)
        self._wheel_cache = SimpleWheelCache(cache_dir)
        self._ephem_cache = EphemWheelCache()

    def get_path_for_link(self, link: Link) -> str:
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link: Link) -> str:
        return self._ephem_cache.get_path_for_link(link)

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        """Return the cached link if any cache holds it, else ``link``."""
        entry = self.get_cache_entry(link, package_name, supported_tags)
        return link if entry is None else entry.link

    def get_cache_entry(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Optional[CacheEntry]:
        """Returns a CacheEntry with a link to a cached item if it exists or
        None. The cache entry indicates if the item was found in the
        persistent or ephemeral cache.
        """
        # Consult the persistent cache first, then the ephemeral one.
        # A sub-cache signals a miss by handing back the original link.
        lookup_order = (
            (self._wheel_cache, True),
            (self._ephem_cache, False),
        )
        for sub_cache, is_persistent in lookup_order:
            found = sub_cache.get(
                link=link,
                package_name=package_name,
                supported_tags=supported_tags,
            )
            if found is not link:
                return CacheEntry(found, persistent=is_persistent)
        return None

    @staticmethod
    def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
        """Write (or overwrite) the origin file for a cache entry, warning
        when an existing, readable origin disagrees with the new URL."""
        origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
        if origin_path.exists():
            try:
                existing = DirectUrl.from_json(
                    origin_path.read_text(encoding="utf-8")
                )
            except Exception as e:
                logger.warning(
                    "Could not read origin file %s in cache entry (%s). "
                    "Will attempt to overwrite it.",
                    origin_path,
                    e,
                )
            else:
                # TODO: use DirectUrl.equivalent when
                # https://github.com/pypa/pip/pull/10564 is merged.
                if existing.url != download_info.url:
                    logger.warning(
                        "Origin URL %s in cache entry %s does not match download URL "
                        "%s. This is likely a pip bug or a cache corruption issue. "
                        "Will overwrite it with the new value.",
                        existing.url,
                        cache_dir,
                        download_info.url,
                    )
        origin_path.write_text(download_info.to_json(), encoding="utf-8")
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/configuration.py
ADDED
|
@@ -0,0 +1,383 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Configuration management setup
|
| 2 |
+
|
| 3 |
+
Some terminology:
|
| 4 |
+
- name
|
| 5 |
+
As written in config files.
|
| 6 |
+
- value
|
| 7 |
+
Value associated with a name
|
| 8 |
+
- key
|
| 9 |
+
Name combined with it's section (section.name)
|
| 10 |
+
- variant
|
| 11 |
+
A single word describing where the configuration key-value pair came from
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
import configparser
|
| 15 |
+
import locale
|
| 16 |
+
import os
|
| 17 |
+
import sys
|
| 18 |
+
from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
|
| 19 |
+
|
| 20 |
+
from pip._internal.exceptions import (
|
| 21 |
+
ConfigurationError,
|
| 22 |
+
ConfigurationFileCouldNotBeLoaded,
|
| 23 |
+
)
|
| 24 |
+
from pip._internal.utils import appdirs
|
| 25 |
+
from pip._internal.utils.compat import WINDOWS
|
| 26 |
+
from pip._internal.utils.logging import getLogger
|
| 27 |
+
from pip._internal.utils.misc import ensure_dir, enum
|
| 28 |
+
|
| 29 |
+
RawConfigParser = configparser.RawConfigParser  # Shorthand
# A variant name ("user", "global", ...); NewType keeps it distinct from str
# for type checkers.
Kind = NewType("Kind", str)

CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
# PIP_VERSION / PIP_HELP are CLI switches, not configuration values.
ENV_NAMES_IGNORED = "version", "help"

# The kinds of configurations there are.
kinds = enum(
    USER="user",  # User Specific
    GLOBAL="global",  # System Wide
    SITE="site",  # [Virtual] Environment Specific
    ENV="env",  # from PIP_CONFIG_FILE
    ENV_VAR="env-var",  # from Environment Variables
)
# Later entries override earlier ones when configurations are merged.
OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
# Only file-backed variants may be selected for editing via load_only.
VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE

logger = getLogger(__name__)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
# NOTE: Maybe use the optionx attribute to normalize keynames.
|
| 50 |
+
def _normalize_name(name: str) -> str:
    """Return *name* made consistent regardless of source: lower-cased,
    underscores turned into dashes, and any leading "--" stripped (only
    long options are recognised)."""
    normalized = name.lower().replace("_", "-")
    return normalized[2:] if normalized.startswith("--") else normalized
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def _disassemble_key(name: str) -> List[str]:
    """Split a dotted key into ``[section, option]``.

    Only the first dot splits, so option names may themselves contain dots.

    :raises ConfigurationError: if *name* contains no dot at all.
    """
    if "." not in name:
        raise ConfigurationError(
            "Key does not contain dot separated section and key. "
            f"Perhaps you wanted to use 'global.{name}' instead?"
        )
    return name.split(".", 1)
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def get_configuration_files() -> Dict[Kind, List[str]]:
    """Map each file-backed variant to the config file paths pip consults."""
    site_wide_files = [
        os.path.join(directory, CONFIG_BASENAME)
        for directory in appdirs.site_config_dirs("pip")
    ]

    venv_file = os.path.join(sys.prefix, CONFIG_BASENAME)

    # Older pips kept per-user config under ~/pip (Windows) or ~/.pip.
    legacy_user_file = os.path.join(
        os.path.expanduser("~"),
        "pip" if WINDOWS else ".pip",
        CONFIG_BASENAME,
    )
    user_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)

    return {
        kinds.GLOBAL: site_wide_files,
        kinds.SITE: [venv_file],
        kinds.USER: [legacy_user_file, user_file],
    }
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
class Configuration:
    """Handles management of configuration.

    Provides an interface to accessing and managing configuration files.

    This class provides an API that takes "section.key-name" style keys and
    stores the value associated with it as "key-name" under the section
    "section".

    This allows for a clean interface wherein both the section and the
    key-name are preserved in an easy to manage form in the configuration
    files and the data stored is also nice.
    """

    def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
        super().__init__()

        # Only file-backed variants can be edited; reject anything else early.
        if load_only is not None and load_only not in VALID_LOAD_ONLY:
            raise ConfigurationError(
                "Got invalid value for load_only - should be one of {}".format(
                    ", ".join(map(repr, VALID_LOAD_ONLY))
                )
            )
        self.isolated = isolated
        self.load_only = load_only

        # Per-variant state, so we keep track of where each datum came from.
        # _parsers: one (filename, parser) pair per loaded file.
        self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
            variant: [] for variant in OVERRIDE_ORDER
        }
        # _config: normalized "section.name" -> value, per variant.
        self._config: Dict[Kind, Dict[str, Any]] = {
            variant: {} for variant in OVERRIDE_ORDER
        }
        # Parsers with unsaved changes, flushed by save().
        self._modified_parsers: List[Tuple[str, RawConfigParser]] = []

    def load(self) -> None:
        """Loads configuration from configuration files and environment"""
        self._load_config_files()
        if not self.isolated:
            self._load_environment_vars()

    def get_file_to_edit(self) -> Optional[str]:
        """Returns the file with highest priority in configuration"""
        assert self.load_only is not None, "Need to be specified a file to be editing"

        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            return None

    def items(self) -> Iterable[Tuple[str, Any]]:
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        return self._dictionary.items()

    def get_value(self, key: str) -> Any:
        """Get a value from the configuration.

        :raises ConfigurationError: if the key is missing or malformed.
        """
        orig_key = key
        key = _normalize_name(key)
        try:
            return self._dictionary[key]
        except KeyError:
            # disassembling triggers a more useful error message than simply
            # "No such key" in the case that the key isn't in the form command.option
            _disassemble_key(key)
            raise ConfigurationError(f"No such key - {orig_key}")

    def set_value(self, key: str, value: Any) -> None:
        """Modify a value in the configuration.

        Requires ``load_only`` to be set; the change is written to disk
        only when save() is called.
        """
        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Modify the parser and the configuration
            if not parser.has_section(section):
                parser.add_section(section)
            parser.set(section, name, value)

        self._config[self.load_only][key] = value
        self._mark_as_modified(fname, parser)

    def unset_value(self, key: str) -> None:
        """Unset a value in the configuration.

        :raises ConfigurationError: if the key is not present in the
            ``load_only`` variant.
        """
        orig_key = key
        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
        if key not in self._config[self.load_only]:
            raise ConfigurationError(f"No such key - {orig_key}")

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)
            if not (
                parser.has_section(section) and parser.remove_option(section, name)
            ):
                # The option was not removed.
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
                )

            # The section may be empty after the option was removed.
            if not parser.items(section):
                parser.remove_section(section)
            self._mark_as_modified(fname, parser)

        del self._config[self.load_only][key]

    def save(self) -> None:
        """Save the current in-memory state."""
        self._ensure_have_load_only()

        for fname, parser in self._modified_parsers:
            logger.info("Writing to %s", fname)

            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))

            # Ensure directory's permission(need to be writeable)
            try:
                with open(fname, "w") as f:
                    parser.write(f)
            except OSError as error:
                raise ConfigurationError(
                    f"An error occurred while writing to the configuration file "
                    f"{fname}: {error}"
                )

    #
    # Private routines
    #

    def _ensure_have_load_only(self) -> None:
        # Mutating operations are only meaningful against a single variant.
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)

    @property
    def _dictionary(self) -> Dict[str, Any]:
        """A dictionary representing the loaded configuration."""
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        # are not needed here.
        retval = {}

        # Later variants in OVERRIDE_ORDER win on key collisions.
        for variant in OVERRIDE_ORDER:
            retval.update(self._config[variant])

        return retval

    def _load_config_files(self) -> None:
        """Loads configuration from configuration files"""
        config_files = dict(self.iter_config_files())
        # PIP_CONFIG_FILE=os.devnull is the documented way to disable all
        # configuration file loading.
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            logger.debug(
                "Skipping loading configuration files due to "
                "environment's PIP_CONFIG_FILE being os.devnull"
            )
            return

        for variant, files in config_files.items():
            for fname in files:
                # If there's specific variant set in `load_only`, load only
                # that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
                    continue

                parser = self._load_file(variant, fname)

                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))

    def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
        # Parse one file and merge its sections into this variant's config.
        logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)

        for section in parser.sections():
            items = parser.items(section)
            self._config[variant].update(self._normalized_keys(section, items))

        return parser

    def _construct_parser(self, fname: str) -> RawConfigParser:
        parser = configparser.RawConfigParser()
        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
            locale_encoding = locale.getpreferredencoding(False)
            try:
                parser.read(fname, encoding=locale_encoding)
            except UnicodeDecodeError:
                # See https://github.com/pypa/pip/issues/4963
                raise ConfigurationFileCouldNotBeLoaded(
                    reason=f"contains invalid {locale_encoding} characters",
                    fname=fname,
                )
            except configparser.Error as error:
                # See https://github.com/pypa/pip/issues/4893
                raise ConfigurationFileCouldNotBeLoaded(error=error)
        return parser

    def _load_environment_vars(self) -> None:
        """Loads configuration from environment variables"""
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self.get_environ_vars())
        )

    def _normalized_keys(
        self, section: str, items: Iterable[Tuple[str, Any]]
    ) -> Dict[str, Any]:
        """Normalizes items to construct a dictionary with normalized keys.

        This routine is where the names become keys and are made the same
        regardless of source - configuration files or environment.
        """
        normalized = {}
        for name, val in items:
            key = section + "." + _normalize_name(name)
            normalized[key] = val
        return normalized

    def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if key.startswith("PIP_"):
                name = key[4:].lower()
                if name not in ENV_NAMES_IGNORED:
                    yield name, val

    # XXX: This is patched in the tests.
    def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
        """Yields variant and configuration files associated with it.

        This should be treated like items of a dictionary. The order
        here doesn't affect what gets overridden. That is controlled
        by OVERRIDE_ORDER. However this does control the order they are
        displayed to the user. It's probably most ergonomic to display
        things in the same order as OVERRIDE_ORDER
        """
        # SMELL: Move the conditions out of this function

        env_config_file = os.environ.get("PIP_CONFIG_FILE", None)
        config_files = get_configuration_files()

        yield kinds.GLOBAL, config_files[kinds.GLOBAL]

        # per-user config is not loaded when env_config_file exists
        should_load_user_config = not self.isolated and not (
            env_config_file and os.path.exists(env_config_file)
        )
        if should_load_user_config:
            # The legacy config file is overridden by the new config file
            yield kinds.USER, config_files[kinds.USER]

        # virtualenv config
        yield kinds.SITE, config_files[kinds.SITE]

        if env_config_file is not None:
            yield kinds.ENV, [env_config_file]
        else:
            yield kinds.ENV, []

    def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
        """Get values present in a config file"""
        return self._config[variant]

    def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
        # Determine which parser to modify
        assert self.load_only
        parsers = self._parsers[self.load_only]
        if not parsers:
            # This should not happen if everything works correctly.
            raise ConfigurationError(
                "Fatal Internal error [id=2]. Please report as a bug."
            )

        # Use the highest priority parser.
        return parsers[-1]

    # XXX: This is patched in the tests.
    def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
        # Record the pair once, so save() writes each file at most once.
        file_parser_tuple = (fname, parser)
        if file_parser_tuple not in self._modified_parsers:
            self._modified_parsers.append(file_parser_tuple)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self._dictionary!r})"
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/exceptions.py
ADDED
|
@@ -0,0 +1,809 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Exceptions used throughout package.
|
| 2 |
+
|
| 3 |
+
This module MUST NOT try to import from anything within `pip._internal` to
|
| 4 |
+
operate. This is expected to be importable from any/all files within the
|
| 5 |
+
subpackage and, thus, should not depend on them.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import configparser
|
| 9 |
+
import contextlib
|
| 10 |
+
import locale
|
| 11 |
+
import logging
|
| 12 |
+
import pathlib
|
| 13 |
+
import re
|
| 14 |
+
import sys
|
| 15 |
+
from itertools import chain, groupby, repeat
|
| 16 |
+
from typing import TYPE_CHECKING, Dict, Iterator, List, Literal, Optional, Union
|
| 17 |
+
|
| 18 |
+
from pip._vendor.packaging.requirements import InvalidRequirement
|
| 19 |
+
from pip._vendor.packaging.version import InvalidVersion
|
| 20 |
+
from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
|
| 21 |
+
from pip._vendor.rich.markup import escape
|
| 22 |
+
from pip._vendor.rich.text import Text
|
| 23 |
+
|
| 24 |
+
if TYPE_CHECKING:
|
| 25 |
+
from hashlib import _Hash
|
| 26 |
+
|
| 27 |
+
from pip._vendor.requests.models import Request, Response
|
| 28 |
+
|
| 29 |
+
from pip._internal.metadata import BaseDistribution
|
| 30 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 31 |
+
|
| 32 |
+
logger = logging.getLogger(__name__)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
#
|
| 36 |
+
# Scaffolding
|
| 37 |
+
#
|
| 38 |
+
def _is_kebab_case(s: str) -> bool:
    """Return True if *s* is lowercase words joined by single dashes,
    e.g. "bad-wheel-name"."""
    kebab_pattern = re.compile(r"^[a-z]+(-[a-z]+)*$")
    return kebab_pattern.match(s) is not None
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def _prefix_with_indent(
    s: Union[Text, str],
    console: Console,
    *,
    prefix: str,
    indent: str,
) -> Text:
    """Render *s* with *prefix* before its first line and *indent* before
    every subsequent line."""
    text = s if isinstance(s, Text) else console.render_str(s)

    rendered_prefix = console.render_str(prefix, overflow="ignore")
    # Each line break is replaced by newline + the indent string.
    line_separator = console.render_str(f"\n{indent}", overflow="ignore")
    return rendered_prefix + line_separator.join(text.split(allow_blank=True))
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class PipError(Exception):
    """The base pip error.

    Errors defined in this module derive from this class.
    """
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class DiagnosticPipError(PipError):
    """An error, that presents diagnostic information to the user.

    This contains a bunch of logic, to enable pretty presentation of our error
    messages. Each error gets a unique reference. Each error can also include
    additional context, a hint and/or a note -- which are presented with the
    main error message in a consistent style.

    This is adapted from the error output styling in `sphinx-theme-builder`.
    """

    # Kebab-case identifier for this error; subclasses set it as a class
    # attribute, or callers pass ``reference=`` explicitly.
    reference: str

    def __init__(
        self,
        *,
        kind: 'Literal["error", "warning"]' = "error",
        reference: Optional[str] = None,
        message: Union[str, Text],
        context: Optional[Union[str, Text]],
        hint_stmt: Optional[Union[str, Text]],
        note_stmt: Optional[Union[str, Text]] = None,
        link: Optional[str] = None,
    ) -> None:
        # Ensure a proper reference is provided.
        if reference is None:
            assert hasattr(self, "reference"), "error reference not provided!"
            reference = self.reference
        assert _is_kebab_case(reference), "error reference must be kebab-case!"

        self.kind = kind
        self.reference = reference

        self.message = message
        self.context = context

        self.note_stmt = note_stmt
        self.hint_stmt = hint_stmt

        self.link = link

        super().__init__(f"<{self.__class__.__name__}: {self.reference}>")

    def __repr__(self) -> str:
        return (
            f"<{self.__class__.__name__}("
            f"reference={self.reference!r}, "
            f"message={self.message!r}, "
            f"context={self.context!r}, "
            f"note_stmt={self.note_stmt!r}, "
            f"hint_stmt={self.hint_stmt!r}"
            ")>"
        )

    def __rich_console__(
        self,
        console: Console,
        options: ConsoleOptions,
    ) -> RenderResult:
        # Errors are red, warnings are yellow; every marker below should use
        # this colour so the two kinds render consistently.
        colour = "red" if self.kind == "error" else "yellow"

        yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]"
        yield ""

        if not options.ascii_only:
            # Present the main message, with relevant context indented.
            if self.context is not None:
                yield _prefix_with_indent(
                    self.message,
                    console,
                    prefix=f"[{colour}]×[/] ",
                    indent=f"[{colour}]│[/] ",
                )
                yield _prefix_with_indent(
                    self.context,
                    console,
                    prefix=f"[{colour}]╰─>[/] ",
                    indent=f"[{colour}] [/] ",
                )
            else:
                # Fix: was hard-coded "[red]×[/] ", which rendered the marker
                # red even when kind == "warning" (every other marker in this
                # method honours ``colour``).
                yield _prefix_with_indent(
                    self.message,
                    console,
                    prefix=f"[{colour}]×[/] ",
                    indent=" ",
                )
        else:
            # Plain-ASCII fallback: no box-drawing characters, no markup.
            yield self.message
            if self.context is not None:
                yield ""
                yield self.context

        if self.note_stmt is not None or self.hint_stmt is not None:
            yield ""

        if self.note_stmt is not None:
            yield _prefix_with_indent(
                self.note_stmt,
                console,
                prefix="[magenta bold]note[/]: ",
                indent=" ",
            )
        if self.hint_stmt is not None:
            yield _prefix_with_indent(
                self.hint_stmt,
                console,
                prefix="[cyan bold]hint[/]: ",
                indent=" ",
            )

        if self.link is not None:
            yield ""
            yield f"Link: {self.link}"
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
#
|
| 179 |
+
# Actual Errors
|
| 180 |
+
#
|
| 181 |
+
class ConfigurationError(PipError):
    """General exception raised while handling pip configuration."""
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
class InstallationError(PipError):
    """General exception raised during package installation."""
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
class MissingPyProjectBuildRequires(DiagnosticPipError):
    """Raised when pyproject.toml has `build-system`, but no `build-system.requires`."""

    reference = "missing-pyproject-build-system-requires"

    def __init__(self, *, package: str) -> None:
        # The context explains exactly which mandatory key is absent.
        context = Text(
            "This package has an invalid pyproject.toml file.\n"
            "The [build-system] table is missing the mandatory `requires` key."
        )
        super().__init__(
            message=f"Can not process {escape(package)}",
            context=context,
            note_stmt="This is an issue with the package mentioned above, not pip.",
            hint_stmt=Text("See PEP 518 for the detailed specification."),
        )
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
class InvalidPyProjectBuildRequires(DiagnosticPipError):
    """Raised when pyproject.toml has an invalid `build-system.requires`."""

    reference = "invalid-pyproject-build-system-requires"

    def __init__(self, *, package: str, reason: str) -> None:
        # *reason* carries the parse/validation detail for the bad key.
        context = Text(
            "This package has an invalid `build-system.requires` key in "
            f"pyproject.toml.\n{reason}"
        )
        super().__init__(
            message=f"Can not process {escape(package)}",
            context=context,
            note_stmt="This is an issue with the package mentioned above, not pip.",
            hint_stmt=Text("See PEP 518 for the detailed specification."),
        )
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
class NoneMetadataError(PipError):
    """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".

    This signifies an inconsistency, when the Distribution claims to have
    the metadata file (if not, raise ``FileNotFoundError`` instead), but is
    not actually able to produce its content. This may be due to permission
    errors.
    """

    def __init__(self, dist: "BaseDistribution", metadata_name: str) -> None:
        """
        :param dist: A Distribution object.
        :param metadata_name: The name of the metadata being accessed
            (can be "METADATA" or "PKG-INFO").
        """
        self.dist = dist
        self.metadata_name = metadata_name

    def __str__(self) -> str:
        # Stringifying `dist` includes more information than its name alone,
        # like the version and location.
        return f"None {self.metadata_name} metadata found for distribution: {self.dist}"
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
class UserInstallationInvalid(InstallationError):
    """A --user install is requested on an environment without user site."""

    def __str__(self) -> str:
        return "User base directory is not specified"
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
class InvalidSchemeCombination(InstallationError):
    """Raised when mutually incompatible install-scheme options are combined.

    ``args`` holds the conflicting option names.
    """

    def __str__(self) -> str:
        names = [str(a) for a in self.args]
        return f"Cannot set {', '.join(names[:-1])} and {names[-1]} together"
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement."""
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
class BadCommand(PipError):
    """Raised when virtualenv or a command is not found."""
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
class CommandError(PipError):
    """Raised when there is an error in command-line arguments."""
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory."""
|
| 287 |
+
|
| 288 |
+
|
| 289 |
+
class NetworkConnectionError(PipError):
    """HTTP connection error"""

    def __init__(
        self,
        error_msg: str,
        response: Optional["Response"] = None,
        request: Optional["Request"] = None,
    ) -> None:
        """
        Initialize NetworkConnectionError with `request` and `response`
        objects.
        """
        self.response = response
        self.request = request
        self.error_msg = error_msg
        # If no request was passed, backfill it from the response when the
        # response object carries one.
        response_has_request = hasattr(response, "request")
        if self.response is not None and not self.request and response_has_request:
            self.request = self.response.request
        super().__init__(error_msg, response, request)

    def __str__(self) -> str:
        return str(self.error_msg)
|
| 315 |
+
|
| 316 |
+
|
| 317 |
+
class InvalidWheelFilename(InstallationError):
    """Invalid wheel filename."""
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
class UnsupportedWheel(InstallationError):
    """Unsupported wheel."""
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
class InvalidWheel(InstallationError):
    """Invalid (e.g. corrupt) wheel.

    :ivar location: Where the wheel file lives.
    :ivar name: The wheel's (file) name.
    """

    def __init__(self, location: str, name: str) -> None:
        self.location = location
        self.name = name

    def __str__(self) -> str:
        return "Wheel '{}' located at {} is invalid.".format(self.name, self.location)
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
class MetadataInconsistent(InstallationError):
    """Built metadata contains inconsistent information.

    This is raised when the metadata contains values (e.g. name and version)
    that do not match the information previously obtained from sdist filename,
    user-supplied ``#egg=`` value, or an install requirement name.
    """

    def __init__(
        self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str
    ) -> None:
        # f_val: value expected from the filename/requirement;
        # m_val: value actually found in the built metadata.
        self.ireq, self.field = ireq, field
        self.f_val, self.m_val = f_val, m_val

    def __str__(self) -> str:
        return (
            f"Requested {self.ireq} has inconsistent {self.field}: "
            f"expected {self.f_val!r}, but metadata has {self.m_val!r}"
        )
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
class MetadataInvalid(InstallationError):
    """Metadata is invalid.

    :ivar ireq: The requirement whose metadata failed validation.
    :ivar error: Human-readable description of the problem.
    """

    def __init__(self, ireq: "InstallRequirement", error: str) -> None:
        self.ireq, self.error = ireq, error

    def __str__(self) -> str:
        return f"Requested {self.ireq} has invalid metadata: {self.error}"
|
| 368 |
+
|
| 369 |
+
|
| 370 |
+
class InstallationSubprocessError(DiagnosticPipError, InstallationError):
    """A subprocess call failed."""

    reference = "subprocess-exited-with-error"

    def __init__(
        self,
        *,
        command_description: str,
        exit_code: int,
        output_lines: Optional[List[str]],
    ) -> None:
        # output_lines is None when output was streamed straight to the
        # console instead of being captured, so point the user upward.
        if output_lines is None:
            output_prompt = Text("See above for output.")
        else:
            # Frame the captured output with "[N lines of output]" /
            # "[end of output]" markers. The raw string with the escaped
            # bracket (R"...\[end...") keeps rich from parsing it as markup.
            output_prompt = (
                Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n")
                + Text("".join(output_lines))
                + Text.from_markup(R"[red]\[end of output][/]")
            )

        super().__init__(
            message=(
                f"[green]{escape(command_description)}[/] did not run successfully.\n"
                f"exit code: {exit_code}"
            ),
            context=output_prompt,
            hint_stmt=None,
            note_stmt=(
                "This error originates from a subprocess, and is likely not a "
                "problem with pip."
            ),
        )

        self.command_description = command_description
        self.exit_code = exit_code

    def __str__(self) -> str:
        return f"{self.command_description} exited with {self.exit_code}"
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
class MetadataGenerationFailed(InstallationSubprocessError, InstallationError):
    """Raised when generating a package's metadata fails."""

    reference = "metadata-generation-failed"

    def __init__(
        self,
        *,
        package_details: str,
    ) -> None:
        # Deliberately skips InstallationSubprocessError.__init__ in the MRO
        # (super(InstallationSubprocessError, self) resolves to
        # DiagnosticPipError), since this error summarizes the failure rather
        # than re-describing the subprocess invocation.
        super(InstallationSubprocessError, self).__init__(
            message="Encountered error while generating package metadata.",
            context=escape(package_details),
            hint_stmt="See above for details.",
            note_stmt="This is an issue with the package mentioned above, not pip.",
        )

    def __str__(self) -> str:
        return "metadata generation failed"
|
| 428 |
+
|
| 429 |
+
|
| 430 |
+
class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting"""

    def __init__(self) -> None:
        self.errors: List[HashError] = []

    def append(self, error: "HashError") -> None:
        self.errors.append(error)

    def __str__(self) -> str:
        if not self.errors:
            return ""
        # Sort by recovery difficulty (HashError.order) so each class's
        # errors are contiguous for groupby, then emit one heading per class.
        self.errors.sort(key=lambda e: e.order)
        lines: List[str] = []
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        return "\n".join(lines)

    def __bool__(self) -> bool:
        return bool(self.errors)
|
| 451 |
+
|
| 452 |
+
|
| 453 |
+
class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """

    req: Optional["InstallRequirement"] = None
    head = ""
    order: int = -1

    def body(self) -> str:
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            its link already populated by the resolver's _populate_link().

        """
        return f" {self._requirement_name()}"

    def __str__(self) -> str:
        return f"{self.head}\n{self.body()}"

    def _requirement_name(self) -> str:
        """Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        """
        if self.req:
            return str(self.req)
        return "unknown package"
|
| 497 |
+
|
| 498 |
+
|
| 499 |
+
class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    # Lowest order: hardest class of hash problem to recover from.
    order = 0
    head = (
        "Can't verify hashes for these requirements because we don't "
        "have a way to hash version control repositories:"
    )
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a ``file://`` requirement that points to a
    directory, but we don't have a method for hashing directories."""

    order = 1
    head = (
        "Can't verify hashes for these file:// requirements because they "
        "point to directories:"
    )
|
| 519 |
+
|
| 520 |
+
|
| 521 |
+
class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = (
        "Hashes are required in --require-hashes mode, but they are "
        "missing from some requirements. Here is a list of those "
        "requirements along with the hashes their downloaded archives "
        "actually had. Add lines like these to your requirements files to "
        "prevent tampering. (If you did not enable --require-hashes "
        "manually, note that it turns on automatically when any package "
        "has a hash.)"
    )

    def __init__(self, gotten_hash: str) -> None:
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self) -> str:
        # Dodge circular import.
        from pip._internal.utils.hashes import FAVORITE_HASH

        package = None
        if self.req:
            if self.req.is_direct:
                # For URL-based requirements, show the original URL seen in
                # the requirements file rather than the package name, so the
                # output can be directly copied into the requirements file.
                package = self.req.original_link
            else:
                # In case someone feeds something downright stupid
                # to InstallRequirement's constructor.
                package = getattr(self.req, "req", None)
        return " {} --hash={}:{}".format(
            package or "unknown package", FAVORITE_HASH, self.gotten_hash
        )
|
| 561 |
+
|
| 562 |
+
|
| 563 |
+
class HashUnpinned(HashError):
    """A requirement had a hash specified but was not pinned to a specific
    version."""

    order = 3
    head = (
        "In --require-hashes mode, all requirements must have their "
        "versions pinned with ==. These do not:"
    )
|
| 572 |
+
|
| 573 |
+
|
| 574 |
+
class HashMismatch(HashError):
    """
    Distribution file hash values don't match.

    :ivar package_name: The name of the package that triggered the hash
        mismatch. Feel free to write to this after the exception is raise to
        improve its error message.

    """

    order = 4
    head = (
        "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
        "FILE. If you have updated the package versions, please update "
        "the hashes. Otherwise, examine the package contents carefully; "
        "someone may have tampered with them."
    )

    def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
        """
        :param allowed: A dict of algorithm names pointing to lists of allowed
            hex digests
        :param gots: A dict of algorithm names pointing to hashes we
            actually got from the files under suspicion
        """
        self.allowed = allowed
        self.gots = gots

    def body(self) -> str:
        return f" {self._requirement_name()}:\n{self._hash_comparison()}"

    def _hash_comparison(self) -> str:
        """
        Return a comparison of actual and expected hash values.

        Example::

            Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
            or 123451234512345123451234512345123451234512345
            Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef

        """

        def hash_then_or(hash_name: str) -> "chain[str]":
            # For now, all the decent hashes have 6-char names, so we can get
            # away with hard-coding space literals.
            return chain([hash_name], repeat(" or"))

        lines: List[str] = []
        for hash_name, expecteds in self.allowed.items():
            prefix = hash_then_or(hash_name)
            # First expected line carries the algorithm name; later ones " or".
            for expected in expecteds:
                lines.append(f" Expected {next(prefix)} {expected}")
            lines.append(f" Got {self.gots[hash_name].hexdigest()}\n")
        return "\n".join(lines)
|
| 630 |
+
|
| 631 |
+
|
| 632 |
+
class UnsupportedPythonVersion(InstallationError):
    """Unsupported python version according to Requires-Python package
    metadata."""
|
| 635 |
+
|
| 636 |
+
|
| 637 |
+
class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
    """When there are errors while loading a configuration file

    :ivar reason: Short description of what went wrong.
    :ivar fname: Path of the offending file, when known.
    :ivar error: Underlying configparser error, when one was raised.
    """

    def __init__(
        self,
        reason: str = "could not be loaded",
        fname: Optional[str] = None,
        error: Optional[configparser.Error] = None,
    ) -> None:
        super().__init__(error)
        self.reason = reason
        self.fname = fname
        self.error = error

    def __str__(self) -> str:
        # Prefer naming the file; otherwise show the parser error detail.
        if self.fname is not None:
            suffix = f" in {self.fname}."
        else:
            assert self.error is not None
            suffix = f".\n{self.error}\n"
        return f"Configuration file {self.reason}{suffix}"
|
| 658 |
+
|
| 659 |
+
|
| 660 |
+
_DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\
|
| 661 |
+
The Python environment under {sys.prefix} is managed externally, and may not be
|
| 662 |
+
manipulated by the user. Please use specific tooling from the distributor of
|
| 663 |
+
the Python installation to interact with this environment instead.
|
| 664 |
+
"""
|
| 665 |
+
|
| 666 |
+
|
| 667 |
+
class ExternallyManagedEnvironment(DiagnosticPipError):
    """The current environment is externally managed.

    This is raised when the current environment is externally managed, as
    defined by `PEP 668`_. The ``EXTERNALLY-MANAGED`` configuration is checked
    and displayed when the error is bubbled up to the user.

    :param error: The error message read from ``EXTERNALLY-MANAGED``.
    """

    reference = "externally-managed-environment"

    def __init__(self, error: Optional[str]) -> None:
        if error is None:
            context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR)
        else:
            context = Text(error)
        super().__init__(
            message="This environment is externally managed",
            context=context,
            note_stmt=(
                "If you believe this is a mistake, please contact your "
                "Python installation or OS distribution provider. "
                "You can override this, at the risk of breaking your Python "
                "installation or OS, by passing --break-system-packages."
            ),
            hint_stmt=Text("See PEP 668 for the detailed specification."),
        )

    @staticmethod
    def _iter_externally_managed_error_keys() -> Iterator[str]:
        """Yield ``Error-<locale>`` keys, most specific first, ending with ``Error``."""
        # LC_MESSAGES is in POSIX, but not the C standard. The most common
        # platform that does not implement this category is Windows, where
        # using other categories for console message localization is equally
        # unreliable, so we fall back to the locale-less vendor message. This
        # can always be re-evaluated when a vendor proposes a new alternative.
        try:
            category = locale.LC_MESSAGES
        except AttributeError:
            lang: Optional[str] = None
        else:
            lang, _ = locale.getlocale(category)
        if lang is not None:
            yield f"Error-{lang}"
            # Also try the bare language tag, e.g. "Error-en" for "en_US".
            for sep in ("-", "_"):
                before, found, _ = lang.partition(sep)
                if not found:
                    continue
                yield f"Error-{before}"
        yield "Error"

    @classmethod
    def from_config(
        cls,
        config: Union[pathlib.Path, str],
    ) -> "ExternallyManagedEnvironment":
        """Build the error from an ``EXTERNALLY-MANAGED`` marker file.

        Read/parse failures are logged and degrade to the default message
        rather than raising — the marker text is a best-effort enhancement.
        """
        parser = configparser.ConfigParser(interpolation=None)
        try:
            parser.read(config, encoding="utf-8")
            section = parser["externally-managed"]
            for key in cls._iter_externally_managed_error_keys():
                with contextlib.suppress(KeyError):
                    return cls(section[key])
        except KeyError:
            pass
        except (OSError, UnicodeDecodeError, configparser.ParsingError):
            from pip._internal.utils._log import VERBOSE

            # Include the traceback only when verbose logging is on.
            exc_info = logger.isEnabledFor(VERBOSE)
            logger.warning("Failed to read %s", config, exc_info=exc_info)
        return cls(None)
|
| 738 |
+
|
| 739 |
+
|
| 740 |
+
class UninstallMissingRecord(DiagnosticPipError):
    """Raised when uninstalling a distribution that has no RECORD file."""

    reference = "uninstall-no-record-file"

    def __init__(self, *, distribution: "BaseDistribution") -> None:
        installer = distribution.installer
        if installer and installer != "pip":
            # Another tool installed it; it may know how to remove it.
            hint = Text(
                f"The package was installed by {installer}. "
                "You should check if it can uninstall the package."
            )
        else:
            # pip (or an unknown tool) installed it; a forced reinstall can
            # regenerate the RECORD file.
            pin = f"{distribution.raw_name}=={distribution.version}"
            hint = Text.assemble(
                "You might be able to recover from this via: ",
                (f"pip install --force-reinstall --no-deps {pin}", "green"),
            )

        super().__init__(
            message=Text(f"Cannot uninstall {distribution}"),
            context=(
                "The package's contents are unknown: "
                f"no RECORD file was found for {distribution.raw_name}."
            ),
            hint_stmt=hint,
        )
|
| 765 |
+
|
| 766 |
+
|
| 767 |
+
class LegacyDistutilsInstall(DiagnosticPipError):
    """Raised when uninstalling a distutils-installed distribution."""

    reference = "uninstall-distutils-installed-package"

    def __init__(self, *, distribution: "BaseDistribution") -> None:
        context = (
            "It is a distutils installed project and thus we cannot accurately "
            "determine which files belong to it which would lead to only a partial "
            "uninstall."
        )
        super().__init__(
            message=Text(f"Cannot uninstall {distribution}"),
            context=context,
            hint_stmt=None,
        )
|
| 780 |
+
|
| 781 |
+
|
| 782 |
+
class InvalidInstalledPackage(DiagnosticPipError):
    """Raised when an installed package has unparseable metadata."""

    reference = "invalid-installed-package"

    def __init__(
        self,
        *,
        dist: "BaseDistribution",
        invalid_exc: Union[InvalidRequirement, InvalidVersion],
    ) -> None:
        installed_location = dist.installed_location

        # Which piece of metadata failed to parse.
        invalid_type = (
            "requirement" if isinstance(invalid_exc, InvalidRequirement) else "version"
        )

        # Mention the on-disk location only when it is known.
        location_part = f"in {installed_location!r} " if installed_location else ""
        super().__init__(
            message=Text(
                f"Cannot process installed package {dist} {location_part}"
                f"because it has an invalid {invalid_type}:\n{invalid_exc.args[0]}"
            ),
            context=(
                "Starting with pip 24.1, packages with invalid "
                f"{invalid_type}s can not be processed."
            ),
            hint_stmt="To proceed this package must be uninstalled.",
        )
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/main.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List, Optional
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def main(args: Optional[List[str]] = None) -> int:
    """This is preserved for old console scripts that may still be referencing
    it.

    For additional details, see https://github.com/pypa/pip/issues/7498.

    :param args: Command-line arguments; None means use ``sys.argv``
        semantics of the wrapped entry point.
    :return: The process exit code.
    """
    # Imported lazily so importing this module stays cheap and side-effect free.
    from pip._internal.utils.entrypoints import _wrapper

    return _wrapper(args)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/pyproject.py
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib.util
|
| 2 |
+
import os
|
| 3 |
+
import sys
|
| 4 |
+
from collections import namedtuple
|
| 5 |
+
from typing import Any, List, Optional
|
| 6 |
+
|
| 7 |
+
if sys.version_info >= (3, 11):
|
| 8 |
+
import tomllib
|
| 9 |
+
else:
|
| 10 |
+
from pip._vendor import tomli as tomllib
|
| 11 |
+
|
| 12 |
+
from pip._vendor.packaging.requirements import InvalidRequirement
|
| 13 |
+
|
| 14 |
+
from pip._internal.exceptions import (
|
| 15 |
+
InstallationError,
|
| 16 |
+
InvalidPyProjectBuildRequires,
|
| 17 |
+
MissingPyProjectBuildRequires,
|
| 18 |
+
)
|
| 19 |
+
from pip._internal.utils.packaging import get_requirement
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def _is_list_of_str(obj: Any) -> bool:
|
| 23 |
+
return isinstance(obj, list) and all(isinstance(item, str) for item in obj)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def make_pyproject_path(unpacked_source_directory: str) -> str:
    """Return the path of ``pyproject.toml`` inside *unpacked_source_directory*."""
    return os.path.join(unpacked_source_directory, "pyproject.toml")
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
# Parsed [build-system] information (see load_pyproject_toml):
#   requires     - requirement strings from pyproject.toml
#   backend      - name of the PEP 517 build backend
#   check        - requirements to verify are installed after setting up
#                  the build environment
#   backend_path - backend-path entries, relative to the project root
BuildSystemDetails = namedtuple(
    "BuildSystemDetails", ["requires", "backend", "check", "backend_path"]
)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def load_pyproject_toml(
    use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str
) -> Optional[BuildSystemDetails]:
    """Load the pyproject.toml file.

    Parameters:
        use_pep517 - Has the user requested PEP 517 processing? None
            means the user hasn't explicitly specified.
        pyproject_toml - Location of the project's pyproject.toml file
        setup_py - Location of the project's setup.py file
        req_name - The name of the requirement we're processing (for
            error reporting)

    Returns:
        None if we should use the legacy code path, otherwise a tuple
        (
            requirements from pyproject.toml,
            name of PEP 517 backend,
            requirements we should check are installed after setting
                up the build environment
            directory paths to import the backend from (backend-path),
                relative to the project root.
        )
    """
    has_pyproject = os.path.isfile(pyproject_toml)
    has_setup = os.path.isfile(setup_py)

    if not has_pyproject and not has_setup:
        # Fixed: the second fragment had a pointless f-prefix (no placeholders).
        raise InstallationError(
            f"{req_name} does not appear to be a Python project: "
            "neither 'setup.py' nor 'pyproject.toml' found."
        )

    if has_pyproject:
        with open(pyproject_toml, encoding="utf-8") as f:
            pp_toml = tomllib.loads(f.read())
        build_system = pp_toml.get("build-system")
    else:
        build_system = None

    # The following cases must use PEP 517
    # We check for use_pep517 being non-None and falsy because that means
    # the user explicitly requested --no-use-pep517. The value 0 as
    # opposed to False can occur when the value is provided via an
    # environment variable or config file option (due to the quirk of
    # strtobool() returning an integer in pip's configuration code).
    if has_pyproject and not has_setup:
        if use_pep517 is not None and not use_pep517:
            raise InstallationError(
                "Disabling PEP 517 processing is invalid: "
                "project does not have a setup.py"
            )
        use_pep517 = True
    elif build_system and "build-backend" in build_system:
        if use_pep517 is not None and not use_pep517:
            # f-string instead of .format(), consistent with the rest of
            # this function; the rendered message is unchanged.
            raise InstallationError(
                "Disabling PEP 517 processing is invalid: "
                "project specifies a build backend of "
                f"{build_system['build-backend']} in pyproject.toml"
            )
        use_pep517 = True

    # If we haven't worked out whether to use PEP 517 yet,
    # and the user hasn't explicitly stated a preference,
    # we do so if the project has a pyproject.toml file
    # or if we cannot import setuptools or wheels.

    # We fallback to PEP 517 when without setuptools or without the wheel package,
    # so setuptools can be installed as a default build backend.
    # For more info see:
    # https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9
    # https://github.com/pypa/pip/issues/8559
    elif use_pep517 is None:
        use_pep517 = (
            has_pyproject
            or not importlib.util.find_spec("setuptools")
            or not importlib.util.find_spec("wheel")
        )

    # At this point, we know whether we're going to use PEP 517.
    assert use_pep517 is not None

    # If we're using the legacy code path, there is nothing further
    # for us to do here.
    if not use_pep517:
        return None

    if build_system is None:
        # Either the user has a pyproject.toml with no build-system
        # section, or the user has no pyproject.toml, but has opted in
        # explicitly via --use-pep517.
        # In the absence of any explicit backend specification, we
        # assume the setuptools backend that most closely emulates the
        # traditional direct setup.py execution, and require wheel and
        # a version of setuptools that supports that backend.

        build_system = {
            "requires": ["setuptools>=40.8.0"],
            "build-backend": "setuptools.build_meta:__legacy__",
        }

    # If we're using PEP 517, we have build system information (either
    # from pyproject.toml, or defaulted by the code above).
    # Note that at this point, we do not know if the user has actually
    # specified a backend, though.
    assert build_system is not None

    # Ensure that the build-system section in pyproject.toml conforms
    # to PEP 518.

    # Specifying the build-system table but not the requires key is invalid
    if "requires" not in build_system:
        raise MissingPyProjectBuildRequires(package=req_name)

    # Error out if requires is not a list of strings
    requires = build_system["requires"]
    if not _is_list_of_str(requires):
        raise InvalidPyProjectBuildRequires(
            package=req_name,
            reason="It is not a list of strings.",
        )

    # Each requirement must be valid as per PEP 508
    for requirement in requires:
        try:
            get_requirement(requirement)
        except InvalidRequirement as error:
            raise InvalidPyProjectBuildRequires(
                package=req_name,
                reason=f"It contains an invalid requirement: {requirement!r}",
            ) from error

    backend = build_system.get("build-backend")
    backend_path = build_system.get("backend-path", [])
    check: List[str] = []
    if backend is None:
        # If the user didn't specify a backend, we assume they want to use
        # the setuptools backend. But we can't be sure they have included
        # a version of setuptools which supplies the backend. So we
        # make a note to check that this requirement is present once
        # we have set up the environment.
        # This is quite a lot of work to check for a very specific case. But
        # the problem is, that case is potentially quite common - projects that
        # adopted PEP 518 early for the ability to specify requirements to
        # execute setup.py, but never considered needing to mention the build
        # tools themselves. The original PEP 518 code had a similar check (but
        # implemented in a different way).
        backend = "setuptools.build_meta:__legacy__"
        check = ["setuptools>=40.8.0"]

    return BuildSystemDetails(requires, backend, check, backend_path)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/req/__init__.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import logging
|
| 3 |
+
from dataclasses import dataclass
|
| 4 |
+
from typing import Generator, List, Optional, Sequence, Tuple
|
| 5 |
+
|
| 6 |
+
from pip._internal.utils.logging import indent_log
|
| 7 |
+
|
| 8 |
+
from .req_file import parse_requirements
|
| 9 |
+
from .req_install import InstallRequirement
|
| 10 |
+
from .req_set import RequirementSet
|
| 11 |
+
|
| 12 |
+
__all__ = [
|
| 13 |
+
"RequirementSet",
|
| 14 |
+
"InstallRequirement",
|
| 15 |
+
"parse_requirements",
|
| 16 |
+
"install_given_reqs",
|
| 17 |
+
]
|
| 18 |
+
|
| 19 |
+
logger = logging.getLogger(__name__)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@dataclass(frozen=True)
class InstallationResult:
    """Outcome record for one successfully installed requirement."""

    # Project name of the requirement that was installed.
    name: str
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def _validate_requirements(
    requirements: List[InstallRequirement],
) -> Generator[Tuple[str, InstallRequirement], None, None]:
    """Yield ``(name, requirement)`` pairs, asserting each one is named."""
    for req in requirements:
        name = req.name
        assert name, f"invalid to-be-installed requirement: {req}"
        yield name, req
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def install_given_reqs(
    requirements: List[InstallRequirement],
    global_options: Sequence[str],
    root: Optional[str],
    home: Optional[str],
    prefix: Optional[str],
    warn_script_location: bool,
    use_user_site: bool,
    pycompile: bool,
) -> List[InstallationResult]:
    """
    Install everything in the given list.

    (to be called after having downloaded and unpacked the packages)

    Returns one InstallationResult per installed requirement, in install
    order. Raises on the first failed install, after rolling back any
    uninstall that preceded it.
    """
    # A plain dict preserves insertion order on all supported Pythons
    # (3.7+), so collections.OrderedDict is unnecessary here.
    to_install = dict(_validate_requirements(requirements))

    if to_install:
        logger.info(
            "Installing collected packages: %s",
            ", ".join(to_install.keys()),
        )

    installed = []

    with indent_log():
        for req_name, requirement in to_install.items():
            if requirement.should_reinstall:
                logger.info("Attempting uninstall: %s", req_name)
                with indent_log():
                    uninstalled_pathset = requirement.uninstall(auto_confirm=True)
            else:
                uninstalled_pathset = None

            try:
                requirement.install(
                    global_options,
                    root=root,
                    home=home,
                    prefix=prefix,
                    warn_script_location=warn_script_location,
                    use_user_site=use_user_site,
                    pycompile=pycompile,
                )
            except Exception:
                # if install did not succeed, rollback previous uninstall
                if uninstalled_pathset and not requirement.install_succeeded:
                    uninstalled_pathset.rollback()
                raise
            else:
                if uninstalled_pathset and requirement.install_succeeded:
                    uninstalled_pathset.commit()

            installed.append(InstallationResult(req_name))

    return installed
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_install.cpython-310.pyc
ADDED
|
Binary file (24.8 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/req/req_install.py
ADDED
|
@@ -0,0 +1,934 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
import shutil
|
| 5 |
+
import sys
|
| 6 |
+
import uuid
|
| 7 |
+
import zipfile
|
| 8 |
+
from optparse import Values
|
| 9 |
+
from pathlib import Path
|
| 10 |
+
from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
|
| 11 |
+
|
| 12 |
+
from pip._vendor.packaging.markers import Marker
|
| 13 |
+
from pip._vendor.packaging.requirements import Requirement
|
| 14 |
+
from pip._vendor.packaging.specifiers import SpecifierSet
|
| 15 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 16 |
+
from pip._vendor.packaging.version import Version
|
| 17 |
+
from pip._vendor.packaging.version import parse as parse_version
|
| 18 |
+
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
|
| 19 |
+
|
| 20 |
+
from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
|
| 21 |
+
from pip._internal.exceptions import InstallationError, PreviousBuildDirError
|
| 22 |
+
from pip._internal.locations import get_scheme
|
| 23 |
+
from pip._internal.metadata import (
|
| 24 |
+
BaseDistribution,
|
| 25 |
+
get_default_environment,
|
| 26 |
+
get_directory_distribution,
|
| 27 |
+
get_wheel_distribution,
|
| 28 |
+
)
|
| 29 |
+
from pip._internal.metadata.base import FilesystemWheel
|
| 30 |
+
from pip._internal.models.direct_url import DirectUrl
|
| 31 |
+
from pip._internal.models.link import Link
|
| 32 |
+
from pip._internal.operations.build.metadata import generate_metadata
|
| 33 |
+
from pip._internal.operations.build.metadata_editable import generate_editable_metadata
|
| 34 |
+
from pip._internal.operations.build.metadata_legacy import (
|
| 35 |
+
generate_metadata as generate_metadata_legacy,
|
| 36 |
+
)
|
| 37 |
+
from pip._internal.operations.install.editable_legacy import (
|
| 38 |
+
install_editable as install_editable_legacy,
|
| 39 |
+
)
|
| 40 |
+
from pip._internal.operations.install.wheel import install_wheel
|
| 41 |
+
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
|
| 42 |
+
from pip._internal.req.req_uninstall import UninstallPathSet
|
| 43 |
+
from pip._internal.utils.deprecation import deprecated
|
| 44 |
+
from pip._internal.utils.hashes import Hashes
|
| 45 |
+
from pip._internal.utils.misc import (
|
| 46 |
+
ConfiguredBuildBackendHookCaller,
|
| 47 |
+
ask_path_exists,
|
| 48 |
+
backup_dir,
|
| 49 |
+
display_path,
|
| 50 |
+
hide_url,
|
| 51 |
+
is_installable_dir,
|
| 52 |
+
redact_auth_from_requirement,
|
| 53 |
+
redact_auth_from_url,
|
| 54 |
+
)
|
| 55 |
+
from pip._internal.utils.packaging import get_requirement
|
| 56 |
+
from pip._internal.utils.subprocess import runner_with_spinner_message
|
| 57 |
+
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
|
| 58 |
+
from pip._internal.utils.unpacking import unpack_file
|
| 59 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
| 60 |
+
from pip._internal.vcs import vcs
|
| 61 |
+
|
| 62 |
+
logger = logging.getLogger(__name__)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class InstallRequirement:
|
| 66 |
+
"""
|
| 67 |
+
Represents something that may be installed later on, may have information
|
| 68 |
+
about where to fetch the relevant requirement and also contains logic for
|
| 69 |
+
installing the said requirement.
|
| 70 |
+
"""
|
| 71 |
+
|
| 72 |
+
def __init__(
    self,
    req: Optional[Requirement],
    comes_from: Optional[Union[str, "InstallRequirement"]],
    editable: bool = False,
    link: Optional[Link] = None,
    markers: Optional[Marker] = None,
    use_pep517: Optional[bool] = None,
    isolated: bool = False,
    *,
    global_options: Optional[List[str]] = None,
    hash_options: Optional[Dict[str, List[str]]] = None,
    config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
    constraint: bool = False,
    extras: Collection[str] = (),
    user_supplied: bool = False,
    permit_editable_wheels: bool = False,
) -> None:
    """Initialize state for one to-be-installed requirement.

    :param req: parsed PEP 508 requirement, or None for a bare link.
    :param comes_from: what pulled this requirement in - either a textual
        origin or the parent InstallRequirement.
    :param link: direct artifact location, if any; when absent but *req*
        carries a URL, the link is derived from it below.
    :param use_pep517: tri-state PEP 517 preference (None = default),
        possibly forced to True below when config_settings are given.
    """
    assert req is None or isinstance(req, Requirement), req
    self.req = req
    self.comes_from = comes_from
    self.constraint = constraint
    self.editable = editable
    self.permit_editable_wheels = permit_editable_wheels

    # source_dir is the local directory where the linked requirement is
    # located, or unpacked. In case unpacking is needed, creating and
    # populating source_dir is done by the RequirementPreparer. Note this
    # is not necessarily the directory where pyproject.toml or setup.py is
    # located - that one is obtained via unpacked_source_directory.
    self.source_dir: Optional[str] = None
    if self.editable:
        assert link
        if link.is_file:
            self.source_dir = os.path.normpath(os.path.abspath(link.file_path))

    # original_link is the direct URL that was provided by the user for the
    # requirement, either directly or via a constraints file.
    if link is None and req and req.url:
        # PEP 508 URL requirement
        link = Link(req.url)
    self.link = self.original_link = link

    # When this InstallRequirement is a wheel obtained from the cache of locally
    # built wheels, this is the source link corresponding to the cache entry, which
    # was used to download and build the cached wheel.
    self.cached_wheel_source_link: Optional[Link] = None

    # Information about the location of the artifact that was downloaded . This
    # property is guaranteed to be set in resolver results.
    self.download_info: Optional[DirectUrl] = None

    # Path to any downloaded or already-existing package.
    self.local_file_path: Optional[str] = None
    if self.link and self.link.is_file:
        self.local_file_path = self.link.file_path

    # Explicit extras win over any extras parsed from the requirement.
    if extras:
        self.extras = extras
    elif req:
        self.extras = req.extras
    else:
        self.extras = set()
    if markers is None and req:
        markers = req.marker
    self.markers = markers

    # This holds the Distribution object if this requirement is already installed.
    self.satisfied_by: Optional[BaseDistribution] = None
    # Whether the installation process should try to uninstall an existing
    # distribution before installing this requirement.
    self.should_reinstall = False
    # Temporary build location
    self._temp_build_dir: Optional[TempDirectory] = None
    # Set to True after successful installation
    self.install_succeeded: Optional[bool] = None
    # Supplied options
    self.global_options = global_options if global_options else []
    self.hash_options = hash_options if hash_options else {}
    self.config_settings = config_settings
    # Set to True after successful preparation of this requirement
    self.prepared = False
    # User supplied requirement are explicitly requested for installation
    # by the user via CLI arguments or requirements files, as opposed to,
    # e.g. dependencies, extras or constraints.
    self.user_supplied = user_supplied

    self.isolated = isolated
    self.build_env: BuildEnvironment = NoOpBuildEnvironment()

    # For PEP 517, the directory where we request the project metadata
    # gets stored. We need this to pass to build_wheel, so the backend
    # can ensure that the wheel matches the metadata (see the PEP for
    # details).
    self.metadata_directory: Optional[str] = None

    # The static build requirements (from pyproject.toml)
    self.pyproject_requires: Optional[List[str]] = None

    # Build requirements that we will check are available
    self.requirements_to_check: List[str] = []

    # The PEP 517 backend we should use to build the project
    self.pep517_backend: Optional[BuildBackendHookCaller] = None

    # Are we using PEP 517 for this requirement?
    # After pyproject.toml has been loaded, the only valid values are True
    # and False. Before loading, None is valid (meaning "use the default").
    # Setting an explicit value before loading pyproject.toml is supported,
    # but after loading this flag should be treated as read only.
    self.use_pep517 = use_pep517

    # If config settings are provided, enforce PEP 517.
    if self.config_settings:
        if self.use_pep517 is False:
            logger.warning(
                "--no-use-pep517 ignored for %s "
                "because --config-settings are specified.",
                self,
            )
        self.use_pep517 = True

    # This requirement needs more preparation before it can be built
    self.needs_more_preparation = False

    # This requirement needs to be unpacked before it can be installed.
    self._archive_source: Optional[Path] = None
| 199 |
+
|
| 200 |
+
def __str__(self) -> str:
|
| 201 |
+
if self.req:
|
| 202 |
+
s = redact_auth_from_requirement(self.req)
|
| 203 |
+
if self.link:
|
| 204 |
+
s += f" from {redact_auth_from_url(self.link.url)}"
|
| 205 |
+
elif self.link:
|
| 206 |
+
s = redact_auth_from_url(self.link.url)
|
| 207 |
+
else:
|
| 208 |
+
s = "<InstallRequirement>"
|
| 209 |
+
if self.satisfied_by is not None:
|
| 210 |
+
if self.satisfied_by.location is not None:
|
| 211 |
+
location = display_path(self.satisfied_by.location)
|
| 212 |
+
else:
|
| 213 |
+
location = "<memory>"
|
| 214 |
+
s += f" in {location}"
|
| 215 |
+
if self.comes_from:
|
| 216 |
+
if isinstance(self.comes_from, str):
|
| 217 |
+
comes_from: Optional[str] = self.comes_from
|
| 218 |
+
else:
|
| 219 |
+
comes_from = self.comes_from.from_path()
|
| 220 |
+
if comes_from:
|
| 221 |
+
s += f" (from {comes_from})"
|
| 222 |
+
return s
|
| 223 |
+
|
| 224 |
+
def __repr__(self) -> str:
    """Unambiguous representation including the editable flag."""
    cls_name = self.__class__.__name__
    return f"<{cls_name} object: {str(self)} editable={self.editable!r}>"
|
| 229 |
+
|
| 230 |
+
def format_debug(self) -> str:
    """Return a debugging dump of all instance attributes.

    Renders as ``<ClassName object: {attr=value, ...}>`` with attributes
    in sorted order. An un-tested helper for getting state, for debugging.
    """
    attributes = vars(self)
    # Sort the attribute names exactly once; the previous code sorted an
    # already-sorted list a second time, which was redundant work.
    state = (f"{attr}={attributes[attr]!r}" for attr in sorted(attributes))
    return "<{name} object: {{{state}}}>".format(
        name=self.__class__.__name__,
        state=", ".join(state),
    )
|
| 240 |
+
|
| 241 |
+
# --- Attributes/properties valid for all kinds of requirements ---
@property
def name(self) -> Optional[str]:
    """Project name from the parsed requirement, or None when unnamed."""
    if self.req is None:
        return None
    return self.req.name
|
| 247 |
+
|
| 248 |
+
@functools.cached_property
def supports_pyproject_editable(self) -> bool:
    """Whether the PEP 517 backend advertises the ``build_editable`` hook.

    Cached: answering requires entering the build environment and running
    the backend in a subprocess, so we only ask once per requirement.
    Non-PEP 517 requirements never support editable builds this way.
    """
    if not self.use_pep517:
        return False
    assert self.pep517_backend
    with self.build_env:
        runner = runner_with_spinner_message(
            "Checking if build backend supports build_editable"
        )
        with self.pep517_backend.subprocess_runner(runner):
            # NOTE: relies on a private pyproject_hooks API to enumerate
            # the hooks the backend implements.
            return "build_editable" in self.pep517_backend._supported_features()
|
| 259 |
+
|
| 260 |
+
@property
def specifier(self) -> SpecifierSet:
    """Version specifier of the parsed requirement (self.req must be set)."""
    assert self.req is not None
    return self.req.specifier
|
| 264 |
+
|
| 265 |
+
@property
def is_direct(self) -> bool:
    """Whether this requirement was specified as a direct URL."""
    # original_link is only set in __init__ when a URL was provided by the
    # user, either as a link or as a PEP 508 url requirement.
    return self.original_link is not None
|
| 269 |
+
|
| 270 |
+
@property
def is_pinned(self) -> bool:
    """Return whether I am pinned to an exact version.

    For example, some-package==1.2 is pinned; some-package>1.2 is not.
    """
    assert self.req is not None
    specs = list(self.req.specifier)
    if len(specs) != 1:
        return False
    return specs[0].operator in {"==", "==="}
|
| 279 |
+
|
| 280 |
+
def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
    """Evaluate this requirement's environment markers.

    True when there are no markers, or when they hold for at least one of
    the requested extras.
    """
    if self.markers is None:
        return True
    # Evaluate against a single empty extra so markers that reference
    # "extra" remain well-defined without matching any real extra.
    extras = extras_requested if extras_requested else ("",)
    return any(self.markers.evaluate({"extra": extra}) for extra in extras)
|
| 291 |
+
|
| 292 |
+
@property
def has_hash_options(self) -> bool:
    """Return whether any known-good hashes are specified as options.

    These activate --require-hashes mode; hashes specified as part of a
    URL do not.

    """
    # hash_options maps hash algorithm name -> list of hex digests.
    return bool(self.hash_options)
|
| 301 |
+
|
| 302 |
+
def hashes(self, trust_internet: bool = True) -> Hashes:
    """Return a hash-comparer that considers my option- and URL-based
    hashes to be known-good.

    Hashes in URLs--ones embedded in the requirements file, not ones
    downloaded from an index server--are almost peers with ones from
    flags. They satisfy --require-hashes (whether it was implicitly or
    explicitly activated) but do not activate it. md5 and sha224 are not
    allowed in flags, which should nudge people toward good algos. We
    always OR all hashes together, even ones from URLs.

    :param trust_internet: Whether to trust URL-based (#md5=...) hashes
        downloaded from the internet, as by populate_link()

    """
    good_hashes = self.hash_options.copy()
    # Decide which link (if any) may contribute its embedded #hash=...
    if trust_internet:
        link = self.link
    elif self.is_direct and self.user_supplied:
        # Untrusted internet, but the user supplied this URL themselves.
        link = self.original_link
    else:
        link = None
    if link and link.hash:
        assert link.hash_name is not None
        good_hashes.setdefault(link.hash_name, []).append(link.hash)
    return Hashes(good_hashes)
|
| 328 |
+
|
| 329 |
+
def from_path(self) -> Optional[str]:
    """Format a nice origin-chain indicator like ``req->parent->...``."""
    if self.req is None:
        return None
    result = str(self.req)
    if self.comes_from:
        if isinstance(self.comes_from, str):
            origin: Optional[str] = self.comes_from
        else:
            # Recurse into the parent requirement's own origin chain.
            origin = self.comes_from.from_path()
        if origin:
            result = result + "->" + origin
    return result
|
| 343 |
+
|
| 344 |
+
def ensure_build_location(
|
| 345 |
+
self, build_dir: str, autodelete: bool, parallel_builds: bool
|
| 346 |
+
) -> str:
|
| 347 |
+
assert build_dir is not None
|
| 348 |
+
if self._temp_build_dir is not None:
|
| 349 |
+
assert self._temp_build_dir.path
|
| 350 |
+
return self._temp_build_dir.path
|
| 351 |
+
if self.req is None:
|
| 352 |
+
# Some systems have /tmp as a symlink which confuses custom
|
| 353 |
+
# builds (such as numpy). Thus, we ensure that the real path
|
| 354 |
+
# is returned.
|
| 355 |
+
self._temp_build_dir = TempDirectory(
|
| 356 |
+
kind=tempdir_kinds.REQ_BUILD, globally_managed=True
|
| 357 |
+
)
|
| 358 |
+
|
| 359 |
+
return self._temp_build_dir.path
|
| 360 |
+
|
| 361 |
+
# This is the only remaining place where we manually determine the path
|
| 362 |
+
# for the temporary directory. It is only needed for editables where
|
| 363 |
+
# it is the value of the --src option.
|
| 364 |
+
|
| 365 |
+
# When parallel builds are enabled, add a UUID to the build directory
|
| 366 |
+
# name so multiple builds do not interfere with each other.
|
| 367 |
+
dir_name: str = canonicalize_name(self.req.name)
|
| 368 |
+
if parallel_builds:
|
| 369 |
+
dir_name = f"{dir_name}_{uuid.uuid4().hex}"
|
| 370 |
+
|
| 371 |
+
# FIXME: Is there a better place to create the build_dir? (hg and bzr
|
| 372 |
+
# need this)
|
| 373 |
+
if not os.path.exists(build_dir):
|
| 374 |
+
logger.debug("Creating directory %s", build_dir)
|
| 375 |
+
os.makedirs(build_dir)
|
| 376 |
+
actual_build_dir = os.path.join(build_dir, dir_name)
|
| 377 |
+
# `None` indicates that we respect the globally-configured deletion
|
| 378 |
+
# settings, which is what we actually want when auto-deleting.
|
| 379 |
+
delete_arg = None if autodelete else False
|
| 380 |
+
return TempDirectory(
|
| 381 |
+
path=actual_build_dir,
|
| 382 |
+
delete=delete_arg,
|
| 383 |
+
kind=tempdir_kinds.REQ_BUILD,
|
| 384 |
+
globally_managed=True,
|
| 385 |
+
).path
|
| 386 |
+
|
| 387 |
+
def _set_requirement(self) -> None:
|
| 388 |
+
"""Set requirement after generating metadata."""
|
| 389 |
+
assert self.req is None
|
| 390 |
+
assert self.metadata is not None
|
| 391 |
+
assert self.source_dir is not None
|
| 392 |
+
|
| 393 |
+
# Construct a Requirement object from the generated metadata
|
| 394 |
+
if isinstance(parse_version(self.metadata["Version"]), Version):
|
| 395 |
+
op = "=="
|
| 396 |
+
else:
|
| 397 |
+
op = "==="
|
| 398 |
+
|
| 399 |
+
self.req = get_requirement(
|
| 400 |
+
"".join(
|
| 401 |
+
[
|
| 402 |
+
self.metadata["Name"],
|
| 403 |
+
op,
|
| 404 |
+
self.metadata["Version"],
|
| 405 |
+
]
|
| 406 |
+
)
|
| 407 |
+
)
|
| 408 |
+
|
| 409 |
+
def warn_on_mismatching_name(self) -> None:
|
| 410 |
+
assert self.req is not None
|
| 411 |
+
metadata_name = canonicalize_name(self.metadata["Name"])
|
| 412 |
+
if canonicalize_name(self.req.name) == metadata_name:
|
| 413 |
+
# Everything is fine.
|
| 414 |
+
return
|
| 415 |
+
|
| 416 |
+
# If we're here, there's a mismatch. Log a warning about it.
|
| 417 |
+
logger.warning(
|
| 418 |
+
"Generating metadata for package %s "
|
| 419 |
+
"produced metadata for project name %s. Fix your "
|
| 420 |
+
"#egg=%s fragments.",
|
| 421 |
+
self.name,
|
| 422 |
+
metadata_name,
|
| 423 |
+
self.name,
|
| 424 |
+
)
|
| 425 |
+
self.req = get_requirement(metadata_name)
|
| 426 |
+
|
| 427 |
+
def check_if_exists(self, use_user_site: bool) -> None:
|
| 428 |
+
"""Find an installed distribution that satisfies or conflicts
|
| 429 |
+
with this requirement, and set self.satisfied_by or
|
| 430 |
+
self.should_reinstall appropriately.
|
| 431 |
+
"""
|
| 432 |
+
if self.req is None:
|
| 433 |
+
return
|
| 434 |
+
existing_dist = get_default_environment().get_distribution(self.req.name)
|
| 435 |
+
if not existing_dist:
|
| 436 |
+
return
|
| 437 |
+
|
| 438 |
+
version_compatible = self.req.specifier.contains(
|
| 439 |
+
existing_dist.version,
|
| 440 |
+
prereleases=True,
|
| 441 |
+
)
|
| 442 |
+
if not version_compatible:
|
| 443 |
+
self.satisfied_by = None
|
| 444 |
+
if use_user_site:
|
| 445 |
+
if existing_dist.in_usersite:
|
| 446 |
+
self.should_reinstall = True
|
| 447 |
+
elif running_under_virtualenv() and existing_dist.in_site_packages:
|
| 448 |
+
raise InstallationError(
|
| 449 |
+
f"Will not install to the user site because it will "
|
| 450 |
+
f"lack sys.path precedence to {existing_dist.raw_name} "
|
| 451 |
+
f"in {existing_dist.location}"
|
| 452 |
+
)
|
| 453 |
+
else:
|
| 454 |
+
self.should_reinstall = True
|
| 455 |
+
else:
|
| 456 |
+
if self.editable:
|
| 457 |
+
self.should_reinstall = True
|
| 458 |
+
# when installing editables, nothing pre-existing should ever
|
| 459 |
+
# satisfy
|
| 460 |
+
self.satisfied_by = None
|
| 461 |
+
else:
|
| 462 |
+
self.satisfied_by = existing_dist
|
| 463 |
+
|
| 464 |
+
# Things valid for wheels
|
| 465 |
+
@property
|
| 466 |
+
def is_wheel(self) -> bool:
|
| 467 |
+
if not self.link:
|
| 468 |
+
return False
|
| 469 |
+
return self.link.is_wheel
|
| 470 |
+
|
| 471 |
+
@property
|
| 472 |
+
def is_wheel_from_cache(self) -> bool:
|
| 473 |
+
# When True, it means that this InstallRequirement is a local wheel file in the
|
| 474 |
+
# cache of locally built wheels.
|
| 475 |
+
return self.cached_wheel_source_link is not None
|
| 476 |
+
|
| 477 |
+
# Things valid for sdists
|
| 478 |
+
@property
|
| 479 |
+
def unpacked_source_directory(self) -> str:
|
| 480 |
+
assert self.source_dir, f"No source dir for {self}"
|
| 481 |
+
return os.path.join(
|
| 482 |
+
self.source_dir, self.link and self.link.subdirectory_fragment or ""
|
| 483 |
+
)
|
| 484 |
+
|
| 485 |
+
@property
|
| 486 |
+
def setup_py_path(self) -> str:
|
| 487 |
+
assert self.source_dir, f"No source dir for {self}"
|
| 488 |
+
setup_py = os.path.join(self.unpacked_source_directory, "setup.py")
|
| 489 |
+
|
| 490 |
+
return setup_py
|
| 491 |
+
|
| 492 |
+
@property
|
| 493 |
+
def setup_cfg_path(self) -> str:
|
| 494 |
+
assert self.source_dir, f"No source dir for {self}"
|
| 495 |
+
setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")
|
| 496 |
+
|
| 497 |
+
return setup_cfg
|
| 498 |
+
|
| 499 |
+
@property
|
| 500 |
+
def pyproject_toml_path(self) -> str:
|
| 501 |
+
assert self.source_dir, f"No source dir for {self}"
|
| 502 |
+
return make_pyproject_path(self.unpacked_source_directory)
|
| 503 |
+
|
| 504 |
+
def load_pyproject_toml(self) -> None:
|
| 505 |
+
"""Load the pyproject.toml file.
|
| 506 |
+
|
| 507 |
+
After calling this routine, all of the attributes related to PEP 517
|
| 508 |
+
processing for this requirement have been set. In particular, the
|
| 509 |
+
use_pep517 attribute can be used to determine whether we should
|
| 510 |
+
follow the PEP 517 or legacy (setup.py) code path.
|
| 511 |
+
"""
|
| 512 |
+
pyproject_toml_data = load_pyproject_toml(
|
| 513 |
+
self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
|
| 514 |
+
)
|
| 515 |
+
|
| 516 |
+
if pyproject_toml_data is None:
|
| 517 |
+
assert not self.config_settings
|
| 518 |
+
self.use_pep517 = False
|
| 519 |
+
return
|
| 520 |
+
|
| 521 |
+
self.use_pep517 = True
|
| 522 |
+
requires, backend, check, backend_path = pyproject_toml_data
|
| 523 |
+
self.requirements_to_check = check
|
| 524 |
+
self.pyproject_requires = requires
|
| 525 |
+
self.pep517_backend = ConfiguredBuildBackendHookCaller(
|
| 526 |
+
self,
|
| 527 |
+
self.unpacked_source_directory,
|
| 528 |
+
backend,
|
| 529 |
+
backend_path=backend_path,
|
| 530 |
+
)
|
| 531 |
+
|
| 532 |
+
def isolated_editable_sanity_check(self) -> None:
|
| 533 |
+
"""Check that an editable requirement if valid for use with PEP 517/518.
|
| 534 |
+
|
| 535 |
+
This verifies that an editable that has a pyproject.toml either supports PEP 660
|
| 536 |
+
or as a setup.py or a setup.cfg
|
| 537 |
+
"""
|
| 538 |
+
if (
|
| 539 |
+
self.editable
|
| 540 |
+
and self.use_pep517
|
| 541 |
+
and not self.supports_pyproject_editable
|
| 542 |
+
and not os.path.isfile(self.setup_py_path)
|
| 543 |
+
and not os.path.isfile(self.setup_cfg_path)
|
| 544 |
+
):
|
| 545 |
+
raise InstallationError(
|
| 546 |
+
f"Project {self} has a 'pyproject.toml' and its build "
|
| 547 |
+
f"backend is missing the 'build_editable' hook. Since it does not "
|
| 548 |
+
f"have a 'setup.py' nor a 'setup.cfg', "
|
| 549 |
+
f"it cannot be installed in editable mode. "
|
| 550 |
+
f"Consider using a build backend that supports PEP 660."
|
| 551 |
+
)
|
| 552 |
+
|
| 553 |
+
def prepare_metadata(self) -> None:
|
| 554 |
+
"""Ensure that project metadata is available.
|
| 555 |
+
|
| 556 |
+
Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
|
| 557 |
+
Under legacy processing, call setup.py egg-info.
|
| 558 |
+
"""
|
| 559 |
+
assert self.source_dir, f"No source dir for {self}"
|
| 560 |
+
details = self.name or f"from {self.link}"
|
| 561 |
+
|
| 562 |
+
if self.use_pep517:
|
| 563 |
+
assert self.pep517_backend is not None
|
| 564 |
+
if (
|
| 565 |
+
self.editable
|
| 566 |
+
and self.permit_editable_wheels
|
| 567 |
+
and self.supports_pyproject_editable
|
| 568 |
+
):
|
| 569 |
+
self.metadata_directory = generate_editable_metadata(
|
| 570 |
+
build_env=self.build_env,
|
| 571 |
+
backend=self.pep517_backend,
|
| 572 |
+
details=details,
|
| 573 |
+
)
|
| 574 |
+
else:
|
| 575 |
+
self.metadata_directory = generate_metadata(
|
| 576 |
+
build_env=self.build_env,
|
| 577 |
+
backend=self.pep517_backend,
|
| 578 |
+
details=details,
|
| 579 |
+
)
|
| 580 |
+
else:
|
| 581 |
+
self.metadata_directory = generate_metadata_legacy(
|
| 582 |
+
build_env=self.build_env,
|
| 583 |
+
setup_py_path=self.setup_py_path,
|
| 584 |
+
source_dir=self.unpacked_source_directory,
|
| 585 |
+
isolated=self.isolated,
|
| 586 |
+
details=details,
|
| 587 |
+
)
|
| 588 |
+
|
| 589 |
+
# Act on the newly generated metadata, based on the name and version.
|
| 590 |
+
if not self.name:
|
| 591 |
+
self._set_requirement()
|
| 592 |
+
else:
|
| 593 |
+
self.warn_on_mismatching_name()
|
| 594 |
+
|
| 595 |
+
self.assert_source_matches_version()
|
| 596 |
+
|
| 597 |
+
@property
|
| 598 |
+
def metadata(self) -> Any:
|
| 599 |
+
if not hasattr(self, "_metadata"):
|
| 600 |
+
self._metadata = self.get_dist().metadata
|
| 601 |
+
|
| 602 |
+
return self._metadata
|
| 603 |
+
|
| 604 |
+
def get_dist(self) -> BaseDistribution:
|
| 605 |
+
if self.metadata_directory:
|
| 606 |
+
return get_directory_distribution(self.metadata_directory)
|
| 607 |
+
elif self.local_file_path and self.is_wheel:
|
| 608 |
+
assert self.req is not None
|
| 609 |
+
return get_wheel_distribution(
|
| 610 |
+
FilesystemWheel(self.local_file_path),
|
| 611 |
+
canonicalize_name(self.req.name),
|
| 612 |
+
)
|
| 613 |
+
raise AssertionError(
|
| 614 |
+
f"InstallRequirement {self} has no metadata directory and no wheel: "
|
| 615 |
+
f"can't make a distribution."
|
| 616 |
+
)
|
| 617 |
+
|
| 618 |
+
def assert_source_matches_version(self) -> None:
|
| 619 |
+
assert self.source_dir, f"No source dir for {self}"
|
| 620 |
+
version = self.metadata["version"]
|
| 621 |
+
if self.req and self.req.specifier and version not in self.req.specifier:
|
| 622 |
+
logger.warning(
|
| 623 |
+
"Requested %s, but installing version %s",
|
| 624 |
+
self,
|
| 625 |
+
version,
|
| 626 |
+
)
|
| 627 |
+
else:
|
| 628 |
+
logger.debug(
|
| 629 |
+
"Source in %s has version %s, which satisfies requirement %s",
|
| 630 |
+
display_path(self.source_dir),
|
| 631 |
+
version,
|
| 632 |
+
self,
|
| 633 |
+
)
|
| 634 |
+
|
| 635 |
+
# For both source distributions and editables
|
| 636 |
+
def ensure_has_source_dir(
|
| 637 |
+
self,
|
| 638 |
+
parent_dir: str,
|
| 639 |
+
autodelete: bool = False,
|
| 640 |
+
parallel_builds: bool = False,
|
| 641 |
+
) -> None:
|
| 642 |
+
"""Ensure that a source_dir is set.
|
| 643 |
+
|
| 644 |
+
This will create a temporary build dir if the name of the requirement
|
| 645 |
+
isn't known yet.
|
| 646 |
+
|
| 647 |
+
:param parent_dir: The ideal pip parent_dir for the source_dir.
|
| 648 |
+
Generally src_dir for editables and build_dir for sdists.
|
| 649 |
+
:return: self.source_dir
|
| 650 |
+
"""
|
| 651 |
+
if self.source_dir is None:
|
| 652 |
+
self.source_dir = self.ensure_build_location(
|
| 653 |
+
parent_dir,
|
| 654 |
+
autodelete=autodelete,
|
| 655 |
+
parallel_builds=parallel_builds,
|
| 656 |
+
)
|
| 657 |
+
|
| 658 |
+
def needs_unpacked_archive(self, archive_source: Path) -> None:
|
| 659 |
+
assert self._archive_source is None
|
| 660 |
+
self._archive_source = archive_source
|
| 661 |
+
|
| 662 |
+
def ensure_pristine_source_checkout(self) -> None:
|
| 663 |
+
"""Ensure the source directory has not yet been built in."""
|
| 664 |
+
assert self.source_dir is not None
|
| 665 |
+
if self._archive_source is not None:
|
| 666 |
+
unpack_file(str(self._archive_source), self.source_dir)
|
| 667 |
+
elif is_installable_dir(self.source_dir):
|
| 668 |
+
# If a checkout exists, it's unwise to keep going.
|
| 669 |
+
# version inconsistencies are logged later, but do not fail
|
| 670 |
+
# the installation.
|
| 671 |
+
raise PreviousBuildDirError(
|
| 672 |
+
f"pip can't proceed with requirements '{self}' due to a "
|
| 673 |
+
f"pre-existing build directory ({self.source_dir}). This is likely "
|
| 674 |
+
"due to a previous installation that failed . pip is "
|
| 675 |
+
"being responsible and not assuming it can delete this. "
|
| 676 |
+
"Please delete it and try again."
|
| 677 |
+
)
|
| 678 |
+
|
| 679 |
+
# For editable installations
|
| 680 |
+
def update_editable(self) -> None:
|
| 681 |
+
if not self.link:
|
| 682 |
+
logger.debug(
|
| 683 |
+
"Cannot update repository at %s; repository location is unknown",
|
| 684 |
+
self.source_dir,
|
| 685 |
+
)
|
| 686 |
+
return
|
| 687 |
+
assert self.editable
|
| 688 |
+
assert self.source_dir
|
| 689 |
+
if self.link.scheme == "file":
|
| 690 |
+
# Static paths don't get updated
|
| 691 |
+
return
|
| 692 |
+
vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
|
| 693 |
+
# Editable requirements are validated in Requirement constructors.
|
| 694 |
+
# So here, if it's neither a path nor a valid VCS URL, it's a bug.
|
| 695 |
+
assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
|
| 696 |
+
hidden_url = hide_url(self.link.url)
|
| 697 |
+
vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)
|
| 698 |
+
|
| 699 |
+
# Top-level Actions
|
| 700 |
+
def uninstall(
|
| 701 |
+
self, auto_confirm: bool = False, verbose: bool = False
|
| 702 |
+
) -> Optional[UninstallPathSet]:
|
| 703 |
+
"""
|
| 704 |
+
Uninstall the distribution currently satisfying this requirement.
|
| 705 |
+
|
| 706 |
+
Prompts before removing or modifying files unless
|
| 707 |
+
``auto_confirm`` is True.
|
| 708 |
+
|
| 709 |
+
Refuses to delete or modify files outside of ``sys.prefix`` -
|
| 710 |
+
thus uninstallation within a virtual environment can only
|
| 711 |
+
modify that virtual environment, even if the virtualenv is
|
| 712 |
+
linked to global site-packages.
|
| 713 |
+
|
| 714 |
+
"""
|
| 715 |
+
assert self.req
|
| 716 |
+
dist = get_default_environment().get_distribution(self.req.name)
|
| 717 |
+
if not dist:
|
| 718 |
+
logger.warning("Skipping %s as it is not installed.", self.name)
|
| 719 |
+
return None
|
| 720 |
+
logger.info("Found existing installation: %s", dist)
|
| 721 |
+
|
| 722 |
+
uninstalled_pathset = UninstallPathSet.from_dist(dist)
|
| 723 |
+
uninstalled_pathset.remove(auto_confirm, verbose)
|
| 724 |
+
return uninstalled_pathset
|
| 725 |
+
|
| 726 |
+
def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
|
| 727 |
+
def _clean_zip_name(name: str, prefix: str) -> str:
|
| 728 |
+
assert name.startswith(
|
| 729 |
+
prefix + os.path.sep
|
| 730 |
+
), f"name {name!r} doesn't start with prefix {prefix!r}"
|
| 731 |
+
name = name[len(prefix) + 1 :]
|
| 732 |
+
name = name.replace(os.path.sep, "/")
|
| 733 |
+
return name
|
| 734 |
+
|
| 735 |
+
assert self.req is not None
|
| 736 |
+
path = os.path.join(parentdir, path)
|
| 737 |
+
name = _clean_zip_name(path, rootdir)
|
| 738 |
+
return self.req.name + "/" + name
|
| 739 |
+
|
| 740 |
+
def archive(self, build_dir: Optional[str]) -> None:
|
| 741 |
+
"""Saves archive to provided build_dir.
|
| 742 |
+
|
| 743 |
+
Used for saving downloaded VCS requirements as part of `pip download`.
|
| 744 |
+
"""
|
| 745 |
+
assert self.source_dir
|
| 746 |
+
if build_dir is None:
|
| 747 |
+
return
|
| 748 |
+
|
| 749 |
+
create_archive = True
|
| 750 |
+
archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
|
| 751 |
+
archive_path = os.path.join(build_dir, archive_name)
|
| 752 |
+
|
| 753 |
+
if os.path.exists(archive_path):
|
| 754 |
+
response = ask_path_exists(
|
| 755 |
+
f"The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, "
|
| 756 |
+
"(b)ackup, (a)bort ",
|
| 757 |
+
("i", "w", "b", "a"),
|
| 758 |
+
)
|
| 759 |
+
if response == "i":
|
| 760 |
+
create_archive = False
|
| 761 |
+
elif response == "w":
|
| 762 |
+
logger.warning("Deleting %s", display_path(archive_path))
|
| 763 |
+
os.remove(archive_path)
|
| 764 |
+
elif response == "b":
|
| 765 |
+
dest_file = backup_dir(archive_path)
|
| 766 |
+
logger.warning(
|
| 767 |
+
"Backing up %s to %s",
|
| 768 |
+
display_path(archive_path),
|
| 769 |
+
display_path(dest_file),
|
| 770 |
+
)
|
| 771 |
+
shutil.move(archive_path, dest_file)
|
| 772 |
+
elif response == "a":
|
| 773 |
+
sys.exit(-1)
|
| 774 |
+
|
| 775 |
+
if not create_archive:
|
| 776 |
+
return
|
| 777 |
+
|
| 778 |
+
zip_output = zipfile.ZipFile(
|
| 779 |
+
archive_path,
|
| 780 |
+
"w",
|
| 781 |
+
zipfile.ZIP_DEFLATED,
|
| 782 |
+
allowZip64=True,
|
| 783 |
+
)
|
| 784 |
+
with zip_output:
|
| 785 |
+
dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
|
| 786 |
+
for dirpath, dirnames, filenames in os.walk(dir):
|
| 787 |
+
for dirname in dirnames:
|
| 788 |
+
dir_arcname = self._get_archive_name(
|
| 789 |
+
dirname,
|
| 790 |
+
parentdir=dirpath,
|
| 791 |
+
rootdir=dir,
|
| 792 |
+
)
|
| 793 |
+
zipdir = zipfile.ZipInfo(dir_arcname + "/")
|
| 794 |
+
zipdir.external_attr = 0x1ED << 16 # 0o755
|
| 795 |
+
zip_output.writestr(zipdir, "")
|
| 796 |
+
for filename in filenames:
|
| 797 |
+
file_arcname = self._get_archive_name(
|
| 798 |
+
filename,
|
| 799 |
+
parentdir=dirpath,
|
| 800 |
+
rootdir=dir,
|
| 801 |
+
)
|
| 802 |
+
filename = os.path.join(dirpath, filename)
|
| 803 |
+
zip_output.write(filename, file_arcname)
|
| 804 |
+
|
| 805 |
+
logger.info("Saved %s", display_path(archive_path))
|
| 806 |
+
|
| 807 |
+
def install(
|
| 808 |
+
self,
|
| 809 |
+
global_options: Optional[Sequence[str]] = None,
|
| 810 |
+
root: Optional[str] = None,
|
| 811 |
+
home: Optional[str] = None,
|
| 812 |
+
prefix: Optional[str] = None,
|
| 813 |
+
warn_script_location: bool = True,
|
| 814 |
+
use_user_site: bool = False,
|
| 815 |
+
pycompile: bool = True,
|
| 816 |
+
) -> None:
|
| 817 |
+
assert self.req is not None
|
| 818 |
+
scheme = get_scheme(
|
| 819 |
+
self.req.name,
|
| 820 |
+
user=use_user_site,
|
| 821 |
+
home=home,
|
| 822 |
+
root=root,
|
| 823 |
+
isolated=self.isolated,
|
| 824 |
+
prefix=prefix,
|
| 825 |
+
)
|
| 826 |
+
|
| 827 |
+
if self.editable and not self.is_wheel:
|
| 828 |
+
deprecated(
|
| 829 |
+
reason=(
|
| 830 |
+
f"Legacy editable install of {self} (setup.py develop) "
|
| 831 |
+
"is deprecated."
|
| 832 |
+
),
|
| 833 |
+
replacement=(
|
| 834 |
+
"to add a pyproject.toml or enable --use-pep517, "
|
| 835 |
+
"and use setuptools >= 64. "
|
| 836 |
+
"If the resulting installation is not behaving as expected, "
|
| 837 |
+
"try using --config-settings editable_mode=compat. "
|
| 838 |
+
"Please consult the setuptools documentation for more information"
|
| 839 |
+
),
|
| 840 |
+
gone_in="25.1",
|
| 841 |
+
issue=11457,
|
| 842 |
+
)
|
| 843 |
+
if self.config_settings:
|
| 844 |
+
logger.warning(
|
| 845 |
+
"--config-settings ignored for legacy editable install of %s. "
|
| 846 |
+
"Consider upgrading to a version of setuptools "
|
| 847 |
+
"that supports PEP 660 (>= 64).",
|
| 848 |
+
self,
|
| 849 |
+
)
|
| 850 |
+
install_editable_legacy(
|
| 851 |
+
global_options=global_options if global_options is not None else [],
|
| 852 |
+
prefix=prefix,
|
| 853 |
+
home=home,
|
| 854 |
+
use_user_site=use_user_site,
|
| 855 |
+
name=self.req.name,
|
| 856 |
+
setup_py_path=self.setup_py_path,
|
| 857 |
+
isolated=self.isolated,
|
| 858 |
+
build_env=self.build_env,
|
| 859 |
+
unpacked_source_directory=self.unpacked_source_directory,
|
| 860 |
+
)
|
| 861 |
+
self.install_succeeded = True
|
| 862 |
+
return
|
| 863 |
+
|
| 864 |
+
assert self.is_wheel
|
| 865 |
+
assert self.local_file_path
|
| 866 |
+
|
| 867 |
+
install_wheel(
|
| 868 |
+
self.req.name,
|
| 869 |
+
self.local_file_path,
|
| 870 |
+
scheme=scheme,
|
| 871 |
+
req_description=str(self.req),
|
| 872 |
+
pycompile=pycompile,
|
| 873 |
+
warn_script_location=warn_script_location,
|
| 874 |
+
direct_url=self.download_info if self.is_direct else None,
|
| 875 |
+
requested=self.user_supplied,
|
| 876 |
+
)
|
| 877 |
+
self.install_succeeded = True
|
| 878 |
+
|
| 879 |
+
|
| 880 |
+
def check_invalid_constraint_type(req: InstallRequirement) -> str:
|
| 881 |
+
# Check for unsupported forms
|
| 882 |
+
problem = ""
|
| 883 |
+
if not req.name:
|
| 884 |
+
problem = "Unnamed requirements are not allowed as constraints"
|
| 885 |
+
elif req.editable:
|
| 886 |
+
problem = "Editable requirements are not allowed as constraints"
|
| 887 |
+
elif req.extras:
|
| 888 |
+
problem = "Constraints cannot have extras"
|
| 889 |
+
|
| 890 |
+
if problem:
|
| 891 |
+
deprecated(
|
| 892 |
+
reason=(
|
| 893 |
+
"Constraints are only allowed to take the form of a package "
|
| 894 |
+
"name and a version specifier. Other forms were originally "
|
| 895 |
+
"permitted as an accident of the implementation, but were "
|
| 896 |
+
"undocumented. The new implementation of the resolver no "
|
| 897 |
+
"longer supports these forms."
|
| 898 |
+
),
|
| 899 |
+
replacement="replacing the constraint with a requirement",
|
| 900 |
+
# No plan yet for when the new resolver becomes default
|
| 901 |
+
gone_in=None,
|
| 902 |
+
issue=8210,
|
| 903 |
+
)
|
| 904 |
+
|
| 905 |
+
return problem
|
| 906 |
+
|
| 907 |
+
|
| 908 |
+
def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> bool:
|
| 909 |
+
if getattr(options, option, None):
|
| 910 |
+
return True
|
| 911 |
+
for req in reqs:
|
| 912 |
+
if getattr(req, option, None):
|
| 913 |
+
return True
|
| 914 |
+
return False
|
| 915 |
+
|
| 916 |
+
|
| 917 |
+
def check_legacy_setup_py_options(
|
| 918 |
+
options: Values,
|
| 919 |
+
reqs: List[InstallRequirement],
|
| 920 |
+
) -> None:
|
| 921 |
+
has_build_options = _has_option(options, reqs, "build_options")
|
| 922 |
+
has_global_options = _has_option(options, reqs, "global_options")
|
| 923 |
+
if has_build_options or has_global_options:
|
| 924 |
+
deprecated(
|
| 925 |
+
reason="--build-option and --global-option are deprecated.",
|
| 926 |
+
issue=11859,
|
| 927 |
+
replacement="to use --config-settings",
|
| 928 |
+
gone_in=None,
|
| 929 |
+
)
|
| 930 |
+
logger.warning(
|
| 931 |
+
"Implying --no-binary=:all: due to the presence of "
|
| 932 |
+
"--build-option / --global-option. "
|
| 933 |
+
)
|
| 934 |
+
options.format_control.disallow_binaries()
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/req/req_set.py
ADDED
|
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from collections import OrderedDict
|
| 3 |
+
from typing import Dict, List
|
| 4 |
+
|
| 5 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 6 |
+
|
| 7 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 8 |
+
|
| 9 |
+
logger = logging.getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class RequirementSet:
|
| 13 |
+
def __init__(self, check_supported_wheels: bool = True) -> None:
|
| 14 |
+
"""Create a RequirementSet."""
|
| 15 |
+
|
| 16 |
+
self.requirements: Dict[str, InstallRequirement] = OrderedDict()
|
| 17 |
+
self.check_supported_wheels = check_supported_wheels
|
| 18 |
+
|
| 19 |
+
self.unnamed_requirements: List[InstallRequirement] = []
|
| 20 |
+
|
| 21 |
+
def __str__(self) -> str:
|
| 22 |
+
requirements = sorted(
|
| 23 |
+
(req for req in self.requirements.values() if not req.comes_from),
|
| 24 |
+
key=lambda req: canonicalize_name(req.name or ""),
|
| 25 |
+
)
|
| 26 |
+
return " ".join(str(req.req) for req in requirements)
|
| 27 |
+
|
| 28 |
+
def __repr__(self) -> str:
|
| 29 |
+
requirements = sorted(
|
| 30 |
+
self.requirements.values(),
|
| 31 |
+
key=lambda req: canonicalize_name(req.name or ""),
|
| 32 |
+
)
|
| 33 |
+
|
| 34 |
+
format_string = "<{classname} object; {count} requirement(s): {reqs}>"
|
| 35 |
+
return format_string.format(
|
| 36 |
+
classname=self.__class__.__name__,
|
| 37 |
+
count=len(requirements),
|
| 38 |
+
reqs=", ".join(str(req.req) for req in requirements),
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
def add_unnamed_requirement(self, install_req: InstallRequirement) -> None:
|
| 42 |
+
assert not install_req.name
|
| 43 |
+
self.unnamed_requirements.append(install_req)
|
| 44 |
+
|
| 45 |
+
def add_named_requirement(self, install_req: InstallRequirement) -> None:
|
| 46 |
+
assert install_req.name
|
| 47 |
+
|
| 48 |
+
project_name = canonicalize_name(install_req.name)
|
| 49 |
+
self.requirements[project_name] = install_req
|
| 50 |
+
|
| 51 |
+
def has_requirement(self, name: str) -> bool:
|
| 52 |
+
project_name = canonicalize_name(name)
|
| 53 |
+
|
| 54 |
+
return (
|
| 55 |
+
project_name in self.requirements
|
| 56 |
+
and not self.requirements[project_name].constraint
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
def get_requirement(self, name: str) -> InstallRequirement:
|
| 60 |
+
project_name = canonicalize_name(name)
|
| 61 |
+
|
| 62 |
+
if project_name in self.requirements:
|
| 63 |
+
return self.requirements[project_name]
|
| 64 |
+
|
| 65 |
+
raise KeyError(f"No project with the name {name!r}")
|
| 66 |
+
|
| 67 |
+
@property
|
| 68 |
+
def all_requirements(self) -> List[InstallRequirement]:
|
| 69 |
+
return self.unnamed_requirements + list(self.requirements.values())
|
| 70 |
+
|
| 71 |
+
@property
|
| 72 |
+
def requirements_to_install(self) -> List[InstallRequirement]:
|
| 73 |
+
"""Return the list of requirements that need to be installed.
|
| 74 |
+
|
| 75 |
+
TODO remove this property together with the legacy resolver, since the new
|
| 76 |
+
resolver only returns requirements that need to be installed.
|
| 77 |
+
"""
|
| 78 |
+
return [
|
| 79 |
+
install_req
|
| 80 |
+
for install_req in self.all_requirements
|
| 81 |
+
if not install_req.constraint and not install_req.satisfied_by
|
| 82 |
+
]
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/resolution/base.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Callable, List, Optional
|
| 2 |
+
|
| 3 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 4 |
+
from pip._internal.req.req_set import RequirementSet
|
| 5 |
+
|
| 6 |
+
InstallRequirementProvider = Callable[
|
| 7 |
+
[str, Optional[InstallRequirement]], InstallRequirement
|
| 8 |
+
]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class BaseResolver:
|
| 12 |
+
def resolve(
|
| 13 |
+
self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
|
| 14 |
+
) -> RequirementSet:
|
| 15 |
+
raise NotImplementedError()
|
| 16 |
+
|
| 17 |
+
def get_installation_order(
|
| 18 |
+
self, req_set: RequirementSet
|
| 19 |
+
) -> List[InstallRequirement]:
|
| 20 |
+
raise NotImplementedError()
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/base.py
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from dataclasses import dataclass
|
| 2 |
+
from typing import FrozenSet, Iterable, Optional, Tuple
|
| 3 |
+
|
| 4 |
+
from pip._vendor.packaging.specifiers import SpecifierSet
|
| 5 |
+
from pip._vendor.packaging.utils import NormalizedName
|
| 6 |
+
from pip._vendor.packaging.version import Version
|
| 7 |
+
|
| 8 |
+
from pip._internal.models.link import Link, links_equivalent
|
| 9 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 10 |
+
from pip._internal.utils.hashes import Hashes
|
| 11 |
+
|
| 12 |
+
CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def format_name(project: NormalizedName, extras: FrozenSet[NormalizedName]) -> str:
|
| 16 |
+
if not extras:
|
| 17 |
+
return project
|
| 18 |
+
extras_expr = ",".join(sorted(extras))
|
| 19 |
+
return f"{project}[{extras_expr}]"
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@dataclass(frozen=True)
|
| 23 |
+
class Constraint:
|
| 24 |
+
specifier: SpecifierSet
|
| 25 |
+
hashes: Hashes
|
| 26 |
+
links: FrozenSet[Link]
|
| 27 |
+
|
| 28 |
+
@classmethod
|
| 29 |
+
def empty(cls) -> "Constraint":
|
| 30 |
+
return Constraint(SpecifierSet(), Hashes(), frozenset())
|
| 31 |
+
|
| 32 |
+
@classmethod
|
| 33 |
+
def from_ireq(cls, ireq: InstallRequirement) -> "Constraint":
|
| 34 |
+
links = frozenset([ireq.link]) if ireq.link else frozenset()
|
| 35 |
+
return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links)
|
| 36 |
+
|
| 37 |
+
def __bool__(self) -> bool:
|
| 38 |
+
return bool(self.specifier) or bool(self.hashes) or bool(self.links)
|
| 39 |
+
|
| 40 |
+
def __and__(self, other: InstallRequirement) -> "Constraint":
|
| 41 |
+
if not isinstance(other, InstallRequirement):
|
| 42 |
+
return NotImplemented
|
| 43 |
+
specifier = self.specifier & other.specifier
|
| 44 |
+
hashes = self.hashes & other.hashes(trust_internet=False)
|
| 45 |
+
links = self.links
|
| 46 |
+
if other.link:
|
| 47 |
+
links = links.union([other.link])
|
| 48 |
+
return Constraint(specifier, hashes, links)
|
| 49 |
+
|
| 50 |
+
def is_satisfied_by(self, candidate: "Candidate") -> bool:
|
| 51 |
+
# Reject if there are any mismatched URL constraints on this package.
|
| 52 |
+
if self.links and not all(_match_link(link, candidate) for link in self.links):
|
| 53 |
+
return False
|
| 54 |
+
# We can safely always allow prereleases here since PackageFinder
|
| 55 |
+
# already implements the prerelease logic, and would have filtered out
|
| 56 |
+
# prerelease candidates if the user does not expect them.
|
| 57 |
+
return self.specifier.contains(candidate.version, prereleases=True)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class Requirement:
    """Abstract protocol for requirements handed to the resolver.

    Concrete subclasses (specifier-based, explicit, Requires-Python, …) are
    defined elsewhere in this package; this base only declares the interface.
    """

    @property
    def project_name(self) -> NormalizedName:
        """The "project name" of a requirement.

        This is different from ``name`` if this requirement contains extras,
        in which case ``name`` would contain the ``[...]`` part, while this
        refers to the name of the project.
        """
        raise NotImplementedError("Subclass should override")

    @property
    def name(self) -> str:
        """The name identifying this requirement in the resolver.

        This is different from ``project_name`` if this requirement contains
        extras, where ``project_name`` would not contain the ``[...]`` part.
        """
        raise NotImplementedError("Subclass should override")

    def is_satisfied_by(self, candidate: "Candidate") -> bool:
        """Whether ``candidate`` fulfils this requirement.

        The base implementation satisfies nothing; subclasses override.
        """
        return False

    def get_candidate_lookup(self) -> CandidateLookup:
        """Return the lookup pair (see ``CandidateLookup``) used to find candidates."""
        raise NotImplementedError("Subclass should override")

    def format_for_error(self) -> str:
        """Return a human-readable form for resolution error messages."""
        raise NotImplementedError("Subclass should override")
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def _match_link(link: Link, candidate: "Candidate") -> bool:
|
| 91 |
+
if candidate.source_link:
|
| 92 |
+
return links_equivalent(link, candidate.source_link)
|
| 93 |
+
return False
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
class Candidate:
    """Abstract protocol for candidates considered by the resolver.

    Concrete subclasses wrap an ``InstallRequirement``, an installed
    distribution, extras, or the running interpreter.
    """

    @property
    def project_name(self) -> NormalizedName:
        """The "project name" of the candidate.

        This is different from ``name`` if this candidate contains extras,
        in which case ``name`` would contain the ``[...]`` part, while this
        refers to the name of the project.
        """
        raise NotImplementedError("Override in subclass")

    @property
    def name(self) -> str:
        """The name identifying this candidate in the resolver.

        This is different from ``project_name`` if this candidate contains
        extras, where ``project_name`` would not contain the ``[...]`` part.
        """
        raise NotImplementedError("Override in subclass")

    @property
    def version(self) -> Version:
        """The version this candidate would install."""
        raise NotImplementedError("Override in subclass")

    @property
    def is_installed(self) -> bool:
        """Whether the candidate is already present in the environment."""
        raise NotImplementedError("Override in subclass")

    @property
    def is_editable(self) -> bool:
        """Whether the candidate is an editable (development) install."""
        raise NotImplementedError("Override in subclass")

    @property
    def source_link(self) -> Optional[Link]:
        """The link the candidate originates from, if any."""
        raise NotImplementedError("Override in subclass")

    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
        """Yield this candidate's requirements (may yield ``None`` entries)."""
        raise NotImplementedError("Override in subclass")

    def get_install_requirement(self) -> Optional[InstallRequirement]:
        """The backing ``InstallRequirement`` to install, or ``None``."""
        raise NotImplementedError("Override in subclass")

    def format_for_error(self) -> str:
        """Return a human-readable form for resolution error messages."""
        raise NotImplementedError("Subclass should override")
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/candidates.py
ADDED
|
@@ -0,0 +1,574 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import sys
|
| 3 |
+
from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast
|
| 4 |
+
|
| 5 |
+
from pip._vendor.packaging.requirements import InvalidRequirement
|
| 6 |
+
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
| 7 |
+
from pip._vendor.packaging.version import Version
|
| 8 |
+
|
| 9 |
+
from pip._internal.exceptions import (
|
| 10 |
+
HashError,
|
| 11 |
+
InstallationSubprocessError,
|
| 12 |
+
InvalidInstalledPackage,
|
| 13 |
+
MetadataInconsistent,
|
| 14 |
+
MetadataInvalid,
|
| 15 |
+
)
|
| 16 |
+
from pip._internal.metadata import BaseDistribution
|
| 17 |
+
from pip._internal.models.link import Link, links_equivalent
|
| 18 |
+
from pip._internal.models.wheel import Wheel
|
| 19 |
+
from pip._internal.req.constructors import (
|
| 20 |
+
install_req_from_editable,
|
| 21 |
+
install_req_from_line,
|
| 22 |
+
)
|
| 23 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 24 |
+
from pip._internal.utils.direct_url_helpers import direct_url_from_link
|
| 25 |
+
from pip._internal.utils.misc import normalize_version_info
|
| 26 |
+
|
| 27 |
+
from .base import Candidate, Requirement, format_name
|
| 28 |
+
|
| 29 |
+
if TYPE_CHECKING:
|
| 30 |
+
from .factory import Factory
|
| 31 |
+
|
| 32 |
+
logger = logging.getLogger(__name__)

# Type-checking-only union of the concrete candidate classes that directly
# back an installation; as_base_candidate() is the runtime equivalent.
BaseCandidate = Union[
    "AlreadyInstalledCandidate",
    "EditableCandidate",
    "LinkCandidate",
]

# Avoid conflicting with the PyPI package "Python".
REQUIRES_PYTHON_IDENTIFIER = cast(NormalizedName, "<Python from Requires-Python>")
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]:
    """The runtime version of BaseCandidate.

    Returns ``candidate`` itself when it is one of the concrete "base"
    candidate classes, and ``None`` for wrappers such as extras candidates.
    """
    if isinstance(
        candidate,
        (AlreadyInstalledCandidate, EditableCandidate, LinkCandidate),
    ):
        return candidate
    return None
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def make_install_req_from_link(
    link: Link, template: InstallRequirement
) -> InstallRequirement:
    """Create a non-editable ``InstallRequirement`` targeting ``link``.

    ``template`` supplies every user-facing option (PEP 517 mode, hash
    options, config settings, …); only the download link differs.
    """
    assert not template.editable, "template is editable"
    # Prefer the template's parsed requirement string so name/extras/specifier
    # are preserved; fall back to the bare URL when none exists.
    if template.req:
        line = str(template.req)
    else:
        line = link.url
    ireq = install_req_from_line(
        line,
        user_supplied=template.user_supplied,
        comes_from=template.comes_from,
        use_pep517=template.use_pep517,
        isolated=template.isolated,
        constraint=template.constraint,
        global_options=template.global_options,
        hash_options=template.hash_options,
        config_settings=template.config_settings,
    )
    # Point the new requirement at the requested link while remembering the
    # template's original link and extras.
    ireq.original_link = template.original_link
    ireq.link = link
    ireq.extras = template.extras
    return ireq
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def make_install_req_from_editable(
    link: Link, template: InstallRequirement
) -> InstallRequirement:
    """Create an editable ``InstallRequirement`` targeting ``link``.

    ``template`` must itself be editable; its options are forwarded verbatim.
    """
    assert template.editable, "template not editable"
    ireq = install_req_from_editable(
        link.url,
        user_supplied=template.user_supplied,
        comes_from=template.comes_from,
        use_pep517=template.use_pep517,
        isolated=template.isolated,
        constraint=template.constraint,
        permit_editable_wheels=template.permit_editable_wheels,
        global_options=template.global_options,
        hash_options=template.hash_options,
        config_settings=template.config_settings,
    )
    ireq.extras = template.extras
    return ireq
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def _make_install_req_from_dist(
    dist: BaseDistribution, template: InstallRequirement
) -> InstallRequirement:
    """Create an ``InstallRequirement`` describing an installed distribution.

    The requirement line is chosen from, in order of preference: the
    template's parsed requirement, a PEP 508 direct reference to the
    template's link, or an exact ``name==version`` pin.
    """
    if template.req:
        line = str(template.req)
    elif template.link:
        line = f"{dist.canonical_name} @ {template.link.url}"
    else:
        line = f"{dist.canonical_name}=={dist.version}"
    ireq = install_req_from_line(
        line,
        user_supplied=template.user_supplied,
        comes_from=template.comes_from,
        use_pep517=template.use_pep517,
        isolated=template.isolated,
        constraint=template.constraint,
        global_options=template.global_options,
        hash_options=template.hash_options,
        config_settings=template.config_settings,
    )
    # Mark the requirement as already satisfied by the installed dist so the
    # preparer can skip it (see AlreadyInstalledCandidate.__init__).
    ireq.satisfied_by = dist
    return ireq
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
class _InstallRequirementBackedCandidate(Candidate):
    """A candidate backed by an ``InstallRequirement``.

    This represents a package request with the target not being already
    in the environment, and needs to be fetched and installed. The backing
    ``InstallRequirement`` is responsible for most of the leg work; this
    class exposes appropriate information to the resolver.

    :param link: The link passed to the ``InstallRequirement``. The backing
        ``InstallRequirement`` will use this link to fetch the distribution.
    :param source_link: The link this candidate "originates" from. This is
        different from ``link`` when the link is found in the wheel cache.
        ``link`` would point to the wheel cache, while this points to the
        found remote link (e.g. from pypi.org).
    """

    # Prepared distribution metadata; assigned in __init__ via _prepare().
    dist: BaseDistribution
    is_installed = False

    def __init__(
        self,
        link: Link,
        source_link: Link,
        ireq: InstallRequirement,
        factory: "Factory",
        name: Optional[NormalizedName] = None,
        version: Optional[Version] = None,
    ) -> None:
        self._link = link
        self._source_link = source_link
        self._factory = factory
        self._ireq = ireq
        # Name/version may be None here; they are then lazily filled in from
        # the prepared distribution's metadata (see project_name/version).
        self._name = name
        self._version = version
        # Preparing eagerly fetches/builds the distribution and validates
        # its metadata; failures surface at candidate-creation time.
        self.dist = self._prepare()
        self._hash: Optional[int] = None

    def __str__(self) -> str:
        return f"{self.name} {self.version}"

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({str(self._link)!r})"

    def __hash__(self) -> int:
        # Cache the hash: candidates are hashed repeatedly by the resolver.
        if self._hash is not None:
            return self._hash

        self._hash = hash((self.__class__, self._link))
        return self._hash

    def __eq__(self, other: Any) -> bool:
        # Two candidates of the same concrete class are equal when their
        # links are equivalent (ignoring irrelevant URL differences).
        if isinstance(other, self.__class__):
            return links_equivalent(self._link, other._link)
        return False

    @property
    def source_link(self) -> Optional[Link]:
        return self._source_link

    @property
    def project_name(self) -> NormalizedName:
        """The normalised name of the project the candidate refers to"""
        if self._name is None:
            self._name = self.dist.canonical_name
        return self._name

    @property
    def name(self) -> str:
        return self.project_name

    @property
    def version(self) -> Version:
        # Lazily taken from prepared metadata when not known up front.
        if self._version is None:
            self._version = self.dist.version
        return self._version

    def format_for_error(self) -> str:
        return (
            f"{self.name} {self.version} "
            f"(from {self._link.file_path if self._link.is_file else self._link})"
        )

    def _prepare_distribution(self) -> BaseDistribution:
        """Fetch/build the distribution; implemented per concrete subclass."""
        raise NotImplementedError("Override in subclass")

    def _check_metadata_consistency(self, dist: BaseDistribution) -> None:
        """Check for consistency of project name and version of dist."""
        if self._name is not None and self._name != dist.canonical_name:
            raise MetadataInconsistent(
                self._ireq,
                "name",
                self._name,
                dist.canonical_name,
            )
        if self._version is not None and self._version != dist.version:
            raise MetadataInconsistent(
                self._ireq,
                "version",
                str(self._version),
                str(dist.version),
            )
        # check dependencies are valid
        # TODO performance: this means we iterate the dependencies at least twice,
        # we may want to cache parsed Requires-Dist
        try:
            list(dist.iter_dependencies(list(dist.iter_provided_extras())))
        except InvalidRequirement as e:
            raise MetadataInvalid(self._ireq, str(e))

    def _prepare(self) -> BaseDistribution:
        """Prepare the distribution and validate its metadata.

        Annotates hash/subprocess errors with context before re-raising.
        """
        try:
            dist = self._prepare_distribution()
        except HashError as e:
            # Provide HashError the underlying ireq that caused it. This
            # provides context for the resulting error message to show the
            # offending line to the user.
            e.req = self._ireq
            raise
        except InstallationSubprocessError as exc:
            # The output has been presented already, so don't duplicate it.
            exc.context = "See above for output."
            raise

        self._check_metadata_consistency(dist)
        return dist

    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
        requires = self.dist.iter_dependencies() if with_requires else ()
        for r in requires:
            yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
        # Always emit the Requires-Python requirement, even without requires.
        yield self._factory.make_requires_python_requirement(self.dist.requires_python)

    def get_install_requirement(self) -> Optional[InstallRequirement]:
        return self._ireq
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
class LinkCandidate(_InstallRequirementBackedCandidate):
    """A candidate for a (non-editable) distribution fetched from a link."""

    is_editable = False

    def __init__(
        self,
        link: Link,
        template: InstallRequirement,
        factory: "Factory",
        name: Optional[NormalizedName] = None,
        version: Optional[Version] = None,
    ) -> None:
        source_link = link
        # If a locally-built wheel for this link is cached, fetch that wheel
        # instead; source_link keeps pointing at the original remote link.
        cache_entry = factory.get_wheel_cache_entry(source_link, name)
        if cache_entry is not None:
            logger.debug("Using cached wheel link: %s", cache_entry.link)
            link = cache_entry.link
        ireq = make_install_req_from_link(link, template)
        assert ireq.link == link
        if ireq.link.is_wheel and not ireq.link.is_file:
            # Remote wheel: its filename must agree with the requested
            # name/version, since metadata comes from the filename.
            wheel = Wheel(ireq.link.filename)
            wheel_name = canonicalize_name(wheel.name)
            assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel"
            # Version may not be present for PEP 508 direct URLs
            if version is not None:
                wheel_version = Version(wheel.version)
                assert (
                    version == wheel_version
                ), f"{version!r} != {wheel_version!r} for wheel {name}"

        if cache_entry is not None:
            assert ireq.link.is_wheel
            assert ireq.link.is_file
            # Only record the cache provenance when the template's link was
            # not rewritten (identity check is deliberate here).
            if cache_entry.persistent and template.link is template.original_link:
                ireq.cached_wheel_source_link = source_link
            if cache_entry.origin is not None:
                ireq.download_info = cache_entry.origin
            else:
                # Legacy cache entry that does not have origin.json.
                # download_info may miss the archive_info.hashes field.
                ireq.download_info = direct_url_from_link(
                    source_link, link_is_in_wheel_cache=cache_entry.persistent
                )

        super().__init__(
            link=link,
            source_link=source_link,
            ireq=ireq,
            factory=factory,
            name=name,
            version=version,
        )

    def _prepare_distribution(self) -> BaseDistribution:
        preparer = self._factory.preparer
        return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True)
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
class EditableCandidate(_InstallRequirementBackedCandidate):
    """A candidate for an editable (development-mode) install from a link."""

    is_editable = True

    def __init__(
        self,
        link: Link,
        template: InstallRequirement,
        factory: "Factory",
        name: Optional[NormalizedName] = None,
        version: Optional[Version] = None,
    ) -> None:
        # Editable installs are never wheel-cached, so link == source_link.
        super().__init__(
            link=link,
            source_link=link,
            ireq=make_install_req_from_editable(link, template),
            factory=factory,
            name=name,
            version=version,
        )

    def _prepare_distribution(self) -> BaseDistribution:
        return self._factory.preparer.prepare_editable_requirement(self._ireq)
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
class AlreadyInstalledCandidate(Candidate):
    """A candidate representing a distribution already in the environment."""

    is_installed = True
    # Installed candidates have no originating link.
    source_link = None

    def __init__(
        self,
        dist: BaseDistribution,
        template: InstallRequirement,
        factory: "Factory",
    ) -> None:
        self.dist = dist
        self._ireq = _make_install_req_from_dist(dist, template)
        self._factory = factory
        # Lazily populated from dist.version (see the version property).
        self._version = None

        # This is just logging some messages, so we can do it eagerly.
        # The returned dist would be exactly the same as self.dist because we
        # set satisfied_by in _make_install_req_from_dist.
        # TODO: Supply reason based on force_reinstall and upgrade_strategy.
        skip_reason = "already satisfied"
        factory.preparer.prepare_installed_requirement(self._ireq, skip_reason)

    def __str__(self) -> str:
        return str(self.dist)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.dist!r})"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, AlreadyInstalledCandidate):
            return NotImplemented
        return self.name == other.name and self.version == other.version

    def __hash__(self) -> int:
        # Consistent with __eq__: both key on (name, version).
        return hash((self.name, self.version))

    @property
    def project_name(self) -> NormalizedName:
        return self.dist.canonical_name

    @property
    def name(self) -> str:
        return self.project_name

    @property
    def version(self) -> Version:
        if self._version is None:
            self._version = self.dist.version
        return self._version

    @property
    def is_editable(self) -> bool:
        return self.dist.editable

    def format_for_error(self) -> str:
        return f"{self.name} {self.version} (Installed)"

    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
        if not with_requires:
            return

        try:
            for r in self.dist.iter_dependencies():
                yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
        except InvalidRequirement as exc:
            # Broken metadata of an *installed* package is a distinct error;
            # suppress the chained context deliberately.
            raise InvalidInstalledPackage(dist=self.dist, invalid_exc=exc) from None

    def get_install_requirement(self) -> Optional[InstallRequirement]:
        # Nothing to install: the distribution is already present.
        return None
|
| 411 |
+
|
| 412 |
+
|
| 413 |
+
class ExtrasCandidate(Candidate):
    """A candidate that has 'extras', indicating additional dependencies.

    Requirements can be for a project with dependencies, something like
    foo[extra]. The extras don't affect the project/version being installed
    directly, but indicate that we need additional dependencies. We model that
    by having an artificial ExtrasCandidate that wraps the "base" candidate.

    The ExtrasCandidate differs from the base in the following ways:

    1. It has a unique name, of the form foo[extra]. This causes the resolver
       to treat it as a separate node in the dependency graph.
    2. When we're getting the candidate's dependencies,
       a) We specify that we want the extra dependencies as well.
       b) We add a dependency on the base candidate.
          See below for why this is needed.
    3. We return None for the underlying InstallRequirement, as the base
       candidate will provide it, and we don't want to end up with duplicates.

    The dependency on the base candidate is needed so that the resolver can't
    decide that it should recommend foo[extra1] version 1.0 and foo[extra2]
    version 2.0. Having those candidates depend on foo=1.0 and foo=2.0
    respectively forces the resolver to recognise that this is a conflict.
    """

    def __init__(
        self,
        base: BaseCandidate,
        extras: FrozenSet[str],
        *,
        comes_from: Optional[InstallRequirement] = None,
    ) -> None:
        """
        :param comes_from: the InstallRequirement that led to this candidate if it
            differs from the base's InstallRequirement. This will often be the
            case in the sense that this candidate's requirement has the extras
            while the base's does not. Unlike the InstallRequirement backed
            candidates, this requirement is used solely for reporting purposes,
            it does not do any leg work.
        """
        self.base = base
        # Normalise extras so comparison and formatting are canonical.
        self.extras = frozenset(canonicalize_name(e) for e in extras)
        self._comes_from = comes_from if comes_from is not None else self.base._ireq

    def __str__(self) -> str:
        # Splice "[extras]" into the base's "<name> <version>" string.
        name, rest = str(self.base).split(" ", 1)
        return "{}[{}] {}".format(name, ",".join(self.extras), rest)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(base={self.base!r}, extras={self.extras!r})"

    def __hash__(self) -> int:
        return hash((self.base, self.extras))

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, self.__class__):
            return self.base == other.base and self.extras == other.extras
        return False

    @property
    def project_name(self) -> NormalizedName:
        # Extras never change the underlying project's name.
        return self.base.project_name

    @property
    def name(self) -> str:
        """The normalised name of the project the candidate refers to"""
        # Unlike project_name, this includes the "[extras]" suffix so the
        # resolver sees a distinct graph node (see the class docstring).
        return format_name(self.base.project_name, self.extras)

    @property
    def version(self) -> Version:
        return self.base.version

    def format_for_error(self) -> str:
        return "{} [{}]".format(
            self.base.format_for_error(), ", ".join(sorted(self.extras))
        )

    @property
    def is_installed(self) -> bool:
        return self.base.is_installed

    @property
    def is_editable(self) -> bool:
        return self.base.is_editable

    @property
    def source_link(self) -> Optional[Link]:
        return self.base.source_link

    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
        factory = self.base._factory

        # Add a dependency on the exact base
        # (See note 2b in the class docstring)
        yield factory.make_requirement_from_candidate(self.base)
        if not with_requires:
            return

        # The user may have specified extras that the candidate doesn't
        # support. We ignore any unsupported extras here.
        valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras())
        invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras())
        for extra in sorted(invalid_extras):
            logger.warning(
                "%s %s does not provide the extra '%s'",
                self.base.name,
                self.version,
                extra,
            )

        for r in self.base.dist.iter_dependencies(valid_extras):
            yield from factory.make_requirements_from_spec(
                str(r),
                self._comes_from,
                valid_extras,
            )

    def get_install_requirement(self) -> Optional[InstallRequirement]:
        # We don't return anything here, because we always
        # depend on the base candidate, and we'll get the
        # install requirement from that.
        return None
|
| 535 |
+
|
| 536 |
+
|
| 537 |
+
class RequiresPythonCandidate(Candidate):
    """A synthetic candidate standing for the target Python interpreter.

    It participates in resolution so ``Requires-Python`` metadata can be
    checked like any other dependency.
    """

    is_installed = False
    source_link = None

    def __init__(self, py_version_info: Optional[Tuple[int, ...]]) -> None:
        # Fall back to the running interpreter's version if none was given.
        if py_version_info is None:
            parts = sys.version_info[:3]
        else:
            parts = normalize_version_info(py_version_info)
        self._version = Version(".".join(map(str, parts)))

    # We don't need to implement __eq__() and __ne__() since there is always
    # only one RequiresPythonCandidate in a resolution, i.e. the host Python.
    # The built-in object.__eq__() and object.__ne__() do exactly what we want.

    def __str__(self) -> str:
        return f"Python {self._version}"

    @property
    def project_name(self) -> NormalizedName:
        return REQUIRES_PYTHON_IDENTIFIER

    @property
    def name(self) -> str:
        return REQUIRES_PYTHON_IDENTIFIER

    @property
    def version(self) -> Version:
        return self._version

    def format_for_error(self) -> str:
        return f"Python {self.version}"

    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
        # The interpreter itself depends on nothing.
        return ()

    def get_install_requirement(self) -> Optional[InstallRequirement]:
        # Python cannot be installed by pip; nothing to return.
        return None
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/reporter.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from collections import defaultdict
|
| 2 |
+
from logging import getLogger
|
| 3 |
+
from typing import Any, DefaultDict
|
| 4 |
+
|
| 5 |
+
from pip._vendor.resolvelib.reporters import BaseReporter
|
| 6 |
+
|
| 7 |
+
from .base import Candidate, Requirement
|
| 8 |
+
|
| 9 |
+
logger = getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class PipReporter(BaseReporter):
    """Reporter that surfaces backtracking progress to the user.

    Counts candidate rejections per package and emits escalating messages
    at fixed thresholds, plus a debug log of each conflict.
    """

    def __init__(self) -> None:
        # Number of candidates rejected so far, keyed by package name.
        self.reject_count_by_package: DefaultDict[str, int] = defaultdict(int)

        # Messages emitted the first time a package's rejection count hits
        # each threshold; they escalate as backtracking drags on.
        self._messages_at_reject_count = {
            1: (
                "pip is looking at multiple versions of {package_name} to "
                "determine which version is compatible with other "
                "requirements. This could take a while."
            ),
            8: (
                "pip is still looking at multiple versions of {package_name} to "
                "determine which version is compatible with other "
                "requirements. This could take a while."
            ),
            13: (
                "This is taking longer than usual. You might need to provide "
                "the dependency resolver with stricter constraints to reduce "
                "runtime. See https://pip.pypa.io/warnings/backtracking for "
                "guidance. If you want to abort this run, press Ctrl + C."
            ),
        }

    def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
        """Called by resolvelib each time a candidate is discarded."""
        self.reject_count_by_package[candidate.name] += 1

        # Only speak up exactly when a threshold is crossed.
        count = self.reject_count_by_package[candidate.name]
        if count not in self._messages_at_reject_count:
            return

        message = self._messages_at_reject_count[count]
        logger.info("INFO: %s", message.format(package_name=candidate.name))

        msg = "Will try a different candidate, due to conflict:"
        for req_info in criterion.information:
            req, parent = req_info.requirement, req_info.parent
            # Inspired by Factory.get_installation_error
            msg += "\n    "
            if parent:
                msg += f"{parent.name} {parent.version} depends on "
            else:
                msg += "The user requested "
            msg += req.format_for_error()
        logger.debug(msg)
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class PipDebuggingReporter(BaseReporter):
    """A reporter that does an info log for every event it sees."""

    def starting(self) -> None:
        # Resolution has begun.
        logger.info("Reporter.starting()")

    def starting_round(self, index: int) -> None:
        # A new resolution round is starting.
        logger.info("Reporter.starting_round(%r)", index)

    def ending_round(self, index: int, state: Any) -> None:
        # The full state is logged only at debug level because it can be huge.
        logger.info("Reporter.ending_round(%r, state)", index)
        logger.debug("Reporter.ending_round(%r, %r)", index, state)

    def ending(self, state: Any) -> None:
        # Resolution finished with the given final state.
        logger.info("Reporter.ending(%r)", state)

    def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None:
        # NOTE(review): `parent` appears to be the candidate that introduced
        # this requirement; confirm whether it can be None for root
        # requirements (the %r formatting tolerates either).
        logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent)

    def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
        # A candidate was discarded because of `criterion`.
        logger.info("Reporter.rejecting_candidate(%r, %r)", criterion, candidate)

    def pinning(self, candidate: Candidate) -> None:
        # A candidate has been selected (pinned) for its package.
        logger.info("Reporter.pinning(%r)", candidate)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/requirements.py
ADDED
|
@@ -0,0 +1,245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any, Optional
|
| 2 |
+
|
| 3 |
+
from pip._vendor.packaging.specifiers import SpecifierSet
|
| 4 |
+
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
| 5 |
+
|
| 6 |
+
from pip._internal.req.constructors import install_req_drop_extras
|
| 7 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 8 |
+
|
| 9 |
+
from .base import Candidate, CandidateLookup, Requirement, format_name
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class ExplicitRequirement(Requirement):
    """A requirement pinned to exactly one pre-resolved candidate.

    Every dunder and property simply delegates to the wrapped candidate.
    """

    def __init__(self, candidate: Candidate) -> None:
        self.candidate = candidate

    def __str__(self) -> str:
        return str(self.candidate)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.candidate!r})"

    def __eq__(self, other: Any) -> bool:
        # Equality is delegated entirely to the wrapped candidate.
        return (
            isinstance(other, ExplicitRequirement)
            and self.candidate == other.candidate
        )

    def __hash__(self) -> int:
        return hash(self.candidate)

    @property
    def project_name(self) -> NormalizedName:
        # No need to canonicalize - the candidate did this
        return self.candidate.project_name

    @property
    def name(self) -> str:
        # No need to canonicalize - the candidate did this
        return self.candidate.name

    def format_for_error(self) -> str:
        return self.candidate.format_for_error()

    def get_candidate_lookup(self) -> CandidateLookup:
        # The lookup is already resolved: hand back the candidate directly.
        return self.candidate, None

    def is_satisfied_by(self, candidate: Candidate) -> bool:
        return candidate == self.candidate
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class SpecifierRequirement(Requirement):
    """A requirement backed by a PEP 508 (specifier-style) InstallRequirement."""

    def __init__(self, ireq: InstallRequirement) -> None:
        assert ireq.link is None, "This is a link, not a specifier"
        self._ireq = ireq
        # str(ireq) and hash() are comparatively expensive, so both are
        # memoized lazily.
        self._equal_cache: Optional[str] = None
        self._hash: Optional[int] = None
        self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)

    @property
    def _equal(self) -> str:
        # Canonical comparison key, computed once on first use.
        if self._equal_cache is None:
            self._equal_cache = str(self._ireq)
        return self._equal_cache

    def __str__(self) -> str:
        return str(self._ireq.req)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({str(self._ireq.req)!r})"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, SpecifierRequirement):
            return NotImplemented
        return self._equal == other._equal

    def __hash__(self) -> int:
        # Memoized for the same reason as _equal.
        if self._hash is None:
            self._hash = hash(self._equal)
        return self._hash

    @property
    def project_name(self) -> NormalizedName:
        assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
        return canonicalize_name(self._ireq.req.name)

    @property
    def name(self) -> str:
        return format_name(self.project_name, self._extras)

    def format_for_error(self) -> str:
        # Convert comma-separated specifiers into "A, B, ..., F and G"
        # This makes the specifier a bit more "human readable", without
        # risking a change in meaning. (Hopefully! Not all edge cases have
        # been checked)
        chunks = [piece.strip() for piece in str(self).split(",")]
        if not chunks:
            return ""
        if len(chunks) == 1:
            return chunks[0]
        return ", ".join(chunks[:-1]) + " and " + chunks[-1]

    def get_candidate_lookup(self) -> CandidateLookup:
        # No concrete candidate yet; the ireq describes what to look for.
        return None, self._ireq

    def is_satisfied_by(self, candidate: Candidate) -> bool:
        assert candidate.name == self.name, (
            f"Internal issue: Candidate is not for this requirement "
            f"{candidate.name} vs {self.name}"
        )
        # We can safely always allow prereleases here since PackageFinder
        # already implements the prerelease logic, and would have filtered out
        # prerelease candidates if the user does not expect them.
        assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
        return self._ireq.req.specifier.contains(candidate.version, prereleases=True)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class SpecifierWithoutExtrasRequirement(SpecifierRequirement):
    """
    Requirement backed by an install requirement on a base package.
    Trims extras from its install requirement if there are any.
    """

    def __init__(self, ireq: InstallRequirement) -> None:
        assert ireq.link is None, "This is a link, not a specifier"
        # Strip extras up front so this requirement tracks only the base
        # distribution (self._extras is therefore normally empty).
        self._ireq = install_req_drop_extras(ireq)
        self._equal_cache: Optional[str] = None
        self._hash: Optional[int] = None
        self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)

    @property
    def _equal(self) -> str:
        # Same lazy memoization as the parent class, but keyed off the
        # extras-stripped ireq.
        if self._equal_cache is None:
            self._equal_cache = str(self._ireq)
        return self._equal_cache

    def __eq__(self, other: object) -> bool:
        # Only equal to other extras-stripped requirements, never to a
        # plain SpecifierRequirement.
        if not isinstance(other, SpecifierWithoutExtrasRequirement):
            return NotImplemented
        return self._equal == other._equal

    def __hash__(self) -> int:
        if self._hash is None:
            self._hash = hash(self._equal)
        return self._hash
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
class RequiresPythonRequirement(Requirement):
    """A requirement representing Requires-Python metadata."""

    def __init__(self, specifier: SpecifierSet, match: Candidate) -> None:
        self.specifier = specifier
        # Pre-rendered string form: comparing strings is cheaper than
        # comparing SpecifierSet objects in __eq__.
        self._specifier_string = str(specifier)  # for faster __eq__
        self._hash: Optional[int] = None
        self._candidate = match

    def __str__(self) -> str:
        return f"Python {self.specifier}"

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({str(self.specifier)!r})"

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, RequiresPythonRequirement):
            return False
        same_spec = self._specifier_string == other._specifier_string
        return same_spec and self._candidate == other._candidate

    def __hash__(self) -> int:
        # Lazily computed and cached.
        if self._hash is None:
            self._hash = hash((self._specifier_string, self._candidate))
        return self._hash

    @property
    def project_name(self) -> NormalizedName:
        return self._candidate.project_name

    @property
    def name(self) -> str:
        return self._candidate.name

    def format_for_error(self) -> str:
        return str(self)

    def get_candidate_lookup(self) -> CandidateLookup:
        # Offer the interpreter candidate only when it actually satisfies
        # the specifier; otherwise there is nothing to look up.
        if self.specifier.contains(self._candidate.version, prereleases=True):
            return self._candidate, None
        return None, None

    def is_satisfied_by(self, candidate: Candidate) -> bool:
        assert candidate.name == self._candidate.name, "Not Python candidate"
        # We can safely always allow prereleases here since PackageFinder
        # already implements the prerelease logic, and would have filtered out
        # prerelease candidates if the user does not expect them.
        return self.specifier.contains(candidate.version, prereleases=True)
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
class UnsatisfiableRequirement(Requirement):
    """A requirement that cannot be satisfied."""

    def __init__(self, name: NormalizedName) -> None:
        self._name = name

    def __str__(self) -> str:
        return f"{self._name} (unavailable)"

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({str(self._name)!r})"

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, UnsatisfiableRequirement):
            return NotImplemented
        return self._name == other._name

    def __hash__(self) -> int:
        return hash(self._name)

    @property
    def project_name(self) -> NormalizedName:
        return self._name

    @property
    def name(self) -> str:
        return self._name

    def format_for_error(self) -> str:
        return str(self)

    def get_candidate_lookup(self) -> CandidateLookup:
        # Nothing can satisfy this requirement, so there is neither a
        # candidate nor an ireq to offer.
        return None, None

    def is_satisfied_by(self, candidate: Candidate) -> bool:
        # By definition, no candidate ever satisfies this requirement.
        return False
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/resolver.py
ADDED
|
@@ -0,0 +1,317 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import functools
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast
|
| 6 |
+
|
| 7 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 8 |
+
from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible
|
| 9 |
+
from pip._vendor.resolvelib import Resolver as RLResolver
|
| 10 |
+
from pip._vendor.resolvelib.structs import DirectedGraph
|
| 11 |
+
|
| 12 |
+
from pip._internal.cache import WheelCache
|
| 13 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 14 |
+
from pip._internal.operations.prepare import RequirementPreparer
|
| 15 |
+
from pip._internal.req.constructors import install_req_extend_extras
|
| 16 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 17 |
+
from pip._internal.req.req_set import RequirementSet
|
| 18 |
+
from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
|
| 19 |
+
from pip._internal.resolution.resolvelib.provider import PipProvider
|
| 20 |
+
from pip._internal.resolution.resolvelib.reporter import (
|
| 21 |
+
PipDebuggingReporter,
|
| 22 |
+
PipReporter,
|
| 23 |
+
)
|
| 24 |
+
from pip._internal.utils.packaging import get_requirement
|
| 25 |
+
|
| 26 |
+
from .base import Candidate, Requirement
|
| 27 |
+
from .factory import Factory
|
| 28 |
+
|
| 29 |
+
if TYPE_CHECKING:
|
| 30 |
+
from pip._vendor.resolvelib.resolvers import Result as RLResult
|
| 31 |
+
|
| 32 |
+
Result = RLResult[Requirement, Candidate, str]
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
logger = logging.getLogger(__name__)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class Resolver(BaseResolver):
    """The resolvelib-backed dependency resolver.

    Wraps pip's Factory/PipProvider machinery around the vendored
    resolvelib Resolver, translates the resolution result into a
    RequirementSet, and computes a safe installation order.
    """

    # Valid values for `upgrade_strategy`; asserted in __init__.
    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}

    def __init__(
        self,
        preparer: RequirementPreparer,
        finder: PackageFinder,
        wheel_cache: Optional[WheelCache],
        make_install_req: InstallRequirementProvider,
        use_user_site: bool,
        ignore_dependencies: bool,
        ignore_installed: bool,
        ignore_requires_python: bool,
        force_reinstall: bool,
        upgrade_strategy: str,
        py_version_info: Optional[Tuple[int, ...]] = None,
    ):
        super().__init__()
        assert upgrade_strategy in self._allowed_strategies

        # The Factory creates candidates/requirements and owns most of the
        # configuration flags; the resolver itself only keeps what it needs.
        self.factory = Factory(
            finder=finder,
            preparer=preparer,
            make_install_req=make_install_req,
            wheel_cache=wheel_cache,
            use_user_site=use_user_site,
            force_reinstall=force_reinstall,
            ignore_installed=ignore_installed,
            ignore_requires_python=ignore_requires_python,
            py_version_info=py_version_info,
        )
        self.ignore_dependencies = ignore_dependencies
        self.upgrade_strategy = upgrade_strategy
        # Populated by resolve(); consumed later by get_installation_order().
        self._result: Optional[Result] = None

    def resolve(
        self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
    ) -> RequirementSet:
        """Run the resolver on *root_reqs* and return a RequirementSet.

        Raises the InstallationError built by the factory when resolution
        is impossible.
        """
        collected = self.factory.collect_root_requirements(root_reqs)
        provider = PipProvider(
            factory=self.factory,
            constraints=collected.constraints,
            ignore_dependencies=self.ignore_dependencies,
            upgrade_strategy=self.upgrade_strategy,
            user_requested=collected.user_requested,
        )
        # Environment toggle for verbose event logging during resolution.
        if "PIP_RESOLVER_DEBUG" in os.environ:
            reporter: BaseReporter = PipDebuggingReporter()
        else:
            reporter = PipReporter()
        resolver: RLResolver[Requirement, Candidate, str] = RLResolver(
            provider,
            reporter,
        )

        try:
            # Hard cap on resolution rounds so pathological dependency
            # graphs fail instead of running forever.
            limit_how_complex_resolution_can_be = 200000
            result = self._result = resolver.resolve(
                collected.requirements, max_rounds=limit_how_complex_resolution_can_be
            )

        except ResolutionImpossible as e:
            error = self.factory.get_installation_error(
                cast("ResolutionImpossible[Requirement, Candidate]", e),
                collected.constraints,
            )
            raise error from e

        req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
        # process candidates with extras last to ensure their base equivalent is
        # already in the req_set if appropriate.
        # Python's sort is stable so using a binary key function keeps relative order
        # within both subsets.
        for candidate in sorted(
            result.mapping.values(), key=lambda c: c.name != c.project_name
        ):
            ireq = candidate.get_install_requirement()
            if ireq is None:
                if candidate.name != candidate.project_name:
                    # extend existing req's extras
                    with contextlib.suppress(KeyError):
                        req = req_set.get_requirement(candidate.project_name)
                        req_set.add_named_requirement(
                            install_req_extend_extras(
                                req, get_requirement(candidate.name).extras
                            )
                        )
                continue

            # Check if there is already an installation under the same name,
            # and set a flag for later stages to uninstall it, if needed.
            installed_dist = self.factory.get_dist_to_uninstall(candidate)
            if installed_dist is None:
                # There is no existing installation -- nothing to uninstall.
                ireq.should_reinstall = False
            elif self.factory.force_reinstall:
                # The --force-reinstall flag is set -- reinstall.
                ireq.should_reinstall = True
            elif installed_dist.version != candidate.version:
                # The installation is different in version -- reinstall.
                ireq.should_reinstall = True
            elif candidate.is_editable or installed_dist.editable:
                # The incoming distribution is editable, or different in
                # editable-ness to installation -- reinstall.
                ireq.should_reinstall = True
            elif candidate.source_link and candidate.source_link.is_file:
                # The incoming distribution is under file://
                if candidate.source_link.is_wheel:
                    # is a local wheel -- do nothing.
                    logger.info(
                        "%s is already installed with the same version as the "
                        "provided wheel. Use --force-reinstall to force an "
                        "installation of the wheel.",
                        ireq.name,
                    )
                    continue

                # is a local sdist or path -- reinstall
                ireq.should_reinstall = True
            else:
                continue

            link = candidate.source_link
            if link and link.is_yanked:
                # The reason can contain non-ASCII characters, Unicode
                # is required for Python 2.
                msg = (
                    "The candidate selected for download or install is a "
                    "yanked version: {name!r} candidate (version {version} "
                    "at {link})\nReason for being yanked: {reason}"
                ).format(
                    name=candidate.name,
                    version=candidate.version,
                    link=link,
                    reason=link.yanked_reason or "<none given>",
                )
                logger.warning(msg)

            req_set.add_named_requirement(ireq)

        reqs = req_set.all_requirements
        # Finish any download/build work that was deferred during resolution,
        # then mark everything fully prepared.
        self.factory.preparer.prepare_linked_requirements_more(reqs)
        for req in reqs:
            req.prepared = True
            req.needs_more_preparation = False
        return req_set

    def get_installation_order(
        self, req_set: RequirementSet
    ) -> List[InstallRequirement]:
        """Get order for installation of requirements in RequirementSet.

        The returned list contains a requirement before another that depends on
        it. This helps ensure that the environment is kept consistent as they
        get installed one-by-one.

        The current implementation creates a topological ordering of the
        dependency graph, giving more weight to packages with less
        or no dependencies, while breaking any cycles in the graph at
        arbitrary points. We make no guarantees about where the cycle
        would be broken, other than it *would* be broken.
        """
        assert self._result is not None, "must call resolve() first"

        if not req_set.requirements:
            # Nothing is left to install, so we do not need an order.
            return []

        graph = self._result.graph
        weights = get_topological_weights(graph, set(req_set.requirements.keys()))

        sorted_items = sorted(
            req_set.requirements.items(),
            key=functools.partial(_req_set_item_sorter, weights=weights),
            reverse=True,
        )
        return [ireq for _, ireq in sorted_items]
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
def get_topological_weights(
    graph: "DirectedGraph[Optional[str]]", requirement_keys: Set[str]
) -> Dict[Optional[str], int]:
    """Assign weights to each node based on how "deep" they are.

    This implementation may change at any point in the future without prior
    notice.

    We first simplify the dependency graph by pruning any leaves and giving them
    the highest weight: a package without any dependencies should be installed
    first. This is done again and again in the same way, giving ever less weight
    to the newly found leaves. The loop stops when no leaves are left: all
    remaining packages have at least one dependency left in the graph.

    Then we continue with the remaining graph, by taking the length for the
    longest path to any node from root, ignoring any paths that contain a single
    node twice (i.e. cycles). This is done through a depth-first search through
    the graph, while keeping track of the path to the node.

    Cycles in the graph result would result in node being revisited while also
    being on its own path. In this case, take no action. This helps ensure we
    don't get stuck in a cycle.

    When assigning weight, the longer path (i.e. larger length) is preferred.

    We are only interested in the weights of packages that are in the
    requirement_keys.

    NOTE: mutates *graph* in place (leaves are removed during simplification).
    """
    # `path` holds the nodes on the current DFS stack for cycle detection.
    path: Set[Optional[str]] = set()
    weights: Dict[Optional[str], int] = {}

    def visit(node: Optional[str]) -> None:
        if node in path:
            # We hit a cycle, so we'll break it here.
            return

        # Time to visit the children!
        path.add(node)
        for child in graph.iter_children(node):
            visit(child)
        path.remove(node)

        if node not in requirement_keys:
            return

        # Keep the deepest (longest-path) depth seen so far for this node.
        last_known_parent_count = weights.get(node, 0)
        weights[node] = max(last_known_parent_count, len(path))

    # Simplify the graph, pruning leaves that have no dependencies.
    # This is needed for large graphs (say over 200 packages) because the
    # `visit` function is exponentially slower then, taking minutes.
    # See https://github.com/pypa/pip/issues/10557
    # We will loop until we explicitly break the loop.
    while True:
        leaves = set()
        for key in graph:
            if key is None:
                continue
            for _child in graph.iter_children(key):
                # This means we have at least one child
                break
            else:
                # No child.
                leaves.add(key)
        if not leaves:
            # We are done simplifying.
            break
        # Calculate the weight for the leaves.
        weight = len(graph) - 1
        for leaf in leaves:
            if leaf not in requirement_keys:
                continue
            weights[leaf] = weight
        # Remove the leaves from the graph, making it simpler.
        for leaf in leaves:
            graph.remove(leaf)

    # Visit the remaining graph.
    # `None` is guaranteed to be the root node by resolvelib.
    visit(None)

    # Sanity check: all requirement keys should be in the weights,
    # and no other keys should be in the weights.
    difference = set(weights.keys()).difference(requirement_keys)
    assert not difference, difference

    return weights
|
| 304 |
+
|
| 305 |
+
|
| 306 |
+
def _req_set_item_sorter(
    item: Tuple[str, InstallRequirement],
    weights: Dict[Optional[str], int],
) -> Tuple[int, str]:
    """Key function used to sort install requirements for installation.

    Based on the "weight" mapping calculated in ``get_installation_order()``.
    The canonical package name is returned as the second member as a tie-
    breaker to ensure the result is predictable, which is useful in tests.
    """
    raw_name, _ireq = item
    canonical = canonicalize_name(raw_name)
    return weights[canonical], canonical
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/self_outdated_check.py
ADDED
|
@@ -0,0 +1,252 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import datetime
|
| 2 |
+
import functools
|
| 3 |
+
import hashlib
|
| 4 |
+
import json
|
| 5 |
+
import logging
|
| 6 |
+
import optparse
|
| 7 |
+
import os.path
|
| 8 |
+
import sys
|
| 9 |
+
from dataclasses import dataclass
|
| 10 |
+
from typing import Any, Callable, Dict, Optional
|
| 11 |
+
|
| 12 |
+
from pip._vendor.packaging.version import Version
|
| 13 |
+
from pip._vendor.packaging.version import parse as parse_version
|
| 14 |
+
from pip._vendor.rich.console import Group
|
| 15 |
+
from pip._vendor.rich.markup import escape
|
| 16 |
+
from pip._vendor.rich.text import Text
|
| 17 |
+
|
| 18 |
+
from pip._internal.index.collector import LinkCollector
|
| 19 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 20 |
+
from pip._internal.metadata import get_default_environment
|
| 21 |
+
from pip._internal.models.selection_prefs import SelectionPreferences
|
| 22 |
+
from pip._internal.network.session import PipSession
|
| 23 |
+
from pip._internal.utils.compat import WINDOWS
|
| 24 |
+
from pip._internal.utils.entrypoints import (
|
| 25 |
+
get_best_invocation_for_this_pip,
|
| 26 |
+
get_best_invocation_for_this_python,
|
| 27 |
+
)
|
| 28 |
+
from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace
|
| 29 |
+
from pip._internal.utils.misc import (
|
| 30 |
+
ExternallyManagedEnvironment,
|
| 31 |
+
check_externally_managed,
|
| 32 |
+
ensure_dir,
|
| 33 |
+
)
|
| 34 |
+
|
| 35 |
+
_WEEK = datetime.timedelta(days=7)
|
| 36 |
+
|
| 37 |
+
logger = logging.getLogger(__name__)
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def _get_statefile_name(key: str) -> str:
|
| 41 |
+
key_bytes = key.encode()
|
| 42 |
+
name = hashlib.sha224(key_bytes).hexdigest()
|
| 43 |
+
return name
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def _convert_date(isodate: str) -> datetime.datetime:
|
| 47 |
+
"""Convert an ISO format string to a date.
|
| 48 |
+
|
| 49 |
+
Handles the format 2020-01-22T14:24:01Z (trailing Z)
|
| 50 |
+
which is not supported by older versions of fromisoformat.
|
| 51 |
+
"""
|
| 52 |
+
return datetime.datetime.fromisoformat(isodate.replace("Z", "+00:00"))
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class SelfCheckState:
    """Persisted record of the last pip self-version check.

    State is kept as a small JSON file under ``<cache_dir>/selfcheck/``,
    named by a hash of :attr:`key` so every interpreter prefix gets its
    own file. All persistence is best effort.
    """

    def __init__(self, cache_dir: str) -> None:
        self._state: Dict[str, Any] = {}
        self._statefile_path = None

        # Try to load the existing state, if a cache directory is configured.
        if cache_dir:
            self._statefile_path = os.path.join(
                cache_dir, "selfcheck", _get_statefile_name(self.key)
            )
            try:
                with open(self._statefile_path, encoding="utf-8") as fp:
                    self._state = json.load(fp)
            except (OSError, ValueError, KeyError):
                # A missing or corrupt state file is not an error; we just
                # start with empty state rather than failing the command.
                pass

    @property
    def key(self) -> str:
        # The interpreter prefix uniquely identifies this environment.
        return sys.prefix

    def get(self, current_time: datetime.datetime) -> Optional[str]:
        """Check if we have a not-outdated version loaded already."""
        state = self._state
        if not state:
            return None

        if "last_check" not in state:
            return None

        if "pypi_version" not in state:
            return None

        # Refresh once the cached answer is more than a week old.
        checked_at = _convert_date(state["last_check"])
        if current_time - checked_at > _WEEK:
            return None

        return state["pypi_version"]

    def set(self, pypi_version: str, current_time: datetime.datetime) -> None:
        """Persist *pypi_version* as seen at *current_time*, best effort."""
        # If we do not have a path to cache in, don't bother saving.
        if not self._statefile_path:
            return

        # Check to make sure that we own the directory.
        statefile_dir = os.path.dirname(self._statefile_path)
        if not check_path_owner(statefile_dir):
            return

        # Now that we've ensured the directory is owned by this user, we'll
        # go ahead and make sure that all our directories are created.
        ensure_dir(statefile_dir)

        payload = {
            # Include the key so it's easy to tell which pip wrote the file.
            "key": self.key,
            "last_check": current_time.isoformat(),
            "pypi_version": pypi_version,
        }
        serialized = json.dumps(payload, sort_keys=True, separators=(",", ":"))

        with adjacent_tmp_file(self._statefile_path) as f:
            f.write(serialized.encode())

        try:
            # Since we have a prefix-specific state file, we can just
            # overwrite whatever is there, no need to check.
            replace(f.name, self._statefile_path)
        except OSError:
            # Best effort.
            pass
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
@dataclass
class UpgradePrompt:
    """Rich-renderable notice telling the user a newer pip release exists."""

    # Currently installed version string.
    old: str
    # Latest version string available on PyPI.
    new: str

    def __rich__(self) -> Group:
        # On Windows, invoking pip through the interpreter avoids the
        # "pip.exe is in use" problem during self-upgrade.
        if WINDOWS:
            pip_cmd = f"{get_best_invocation_for_this_python()} -m pip"
        else:
            pip_cmd = get_best_invocation_for_this_pip()

        notice = "[bold][[reset][blue]notice[reset][bold]][reset]"
        release_line = Text.from_markup(
            f"{notice} A new release of pip is available: "
            f"[red]{self.old}[reset] -> [green]{self.new}[reset]"
        )
        update_line = Text.from_markup(
            f"{notice} To update, run: "
            f"[green]{escape(pip_cmd)} install --upgrade pip"
        )
        return Group(Text(), release_line, update_line)
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
def was_installed_by_pip(pkg: str) -> bool:
    """Checks whether pkg was installed by pip

    This is used not to display the upgrade message when pip is in fact
    installed by system package manager, such as dnf on Fedora.
    """
    dist = get_default_environment().get_distribution(pkg)
    if dist is None:
        return False
    return dist.installer == "pip"
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
def _get_current_remote_pip_version(
    session: PipSession, options: optparse.Values
) -> Optional[str]:
    """Return the newest acceptable pip version visible on the index.

    Returns None when no candidate can be found (e.g. no index access).
    """
    # Lets use PackageFinder to see what the latest pip version is
    link_collector = LinkCollector.create(
        session,
        options=options,
        suppress_no_index=True,
    )

    # Pass allow_yanked=False so we don't suggest upgrading to a
    # yanked version.
    selection_prefs = SelectionPreferences(
        allow_yanked=False,
        allow_all_prereleases=False,  # Explicitly set to False
    )
    finder = PackageFinder.create(
        link_collector=link_collector,
        selection_prefs=selection_prefs,
    )

    best = finder.find_best_candidate("pip").best_candidate
    if best is None:
        return None
    return str(best.version)
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
def _self_version_check_logic(
    *,
    state: SelfCheckState,
    current_time: datetime.datetime,
    local_version: Version,
    get_remote_version: Callable[[], Optional[str]],
) -> Optional[UpgradePrompt]:
    """Decide whether the user should be prompted to upgrade pip.

    Returns an UpgradePrompt when a strictly newer release exists and pip
    itself was installed by pip; otherwise None.
    """
    # Prefer the cached answer; otherwise query the index and cache it.
    remote_version_str = state.get(current_time)
    if remote_version_str is None:
        remote_version_str = get_remote_version()
        if remote_version_str is None:
            logger.debug("No remote pip version found")
            return None
        state.set(remote_version_str, current_time)

    remote_version = parse_version(remote_version_str)
    logger.debug("Remote version of pip: %s", remote_version)
    logger.debug("Local version of pip: %s", local_version)

    pip_installed_by_pip = was_installed_by_pip("pip")
    logger.debug("Was pip installed by pip? %s", pip_installed_by_pip)
    if not pip_installed_by_pip:
        return None  # Only suggest upgrade if pip is installed by pip.

    # Comparing base versions avoids prompting for pre/post/dev releases
    # of the version that is already installed.
    if (
        local_version < remote_version
        and local_version.base_version != remote_version.base_version
    ):
        return UpgradePrompt(old=str(local_version), new=remote_version_str)

    return None
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
def pip_self_version_check(session: PipSession, options: optparse.Values) -> None:
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    """
    installed_dist = get_default_environment().get_distribution("pip")
    if not installed_dist:
        return

    # Never suggest a self-upgrade in externally managed environments.
    try:
        check_externally_managed()
    except ExternallyManagedEnvironment:
        return

    fetch_remote = functools.partial(
        _get_current_remote_pip_version, session, options
    )
    upgrade_prompt = _self_version_check_logic(
        state=SelfCheckState(cache_dir=options.cache_dir),
        current_time=datetime.datetime.now(datetime.timezone.utc),
        local_version=installed_dist.version,
        get_remote_version=fetch_remote,
    )
    if upgrade_prompt is not None:
        logger.warning("%s", upgrade_prompt, extra={"rich": True})
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/__init__.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Expose a limited set of classes and functions so callers outside of
|
| 2 |
+
# the vcs package don't need to import deeper than `pip._internal.vcs`.
|
| 3 |
+
# (The test directory may still need to import from a vcs sub-package.)
|
| 4 |
+
# Import all vcs modules to register each VCS in the VcsSupport object.
|
| 5 |
+
import pip._internal.vcs.bazaar
|
| 6 |
+
import pip._internal.vcs.git
|
| 7 |
+
import pip._internal.vcs.mercurial
|
| 8 |
+
import pip._internal.vcs.subversion # noqa: F401
|
| 9 |
+
from pip._internal.vcs.versioncontrol import ( # noqa: F401
|
| 10 |
+
RemoteNotFoundError,
|
| 11 |
+
RemoteNotValidError,
|
| 12 |
+
is_url,
|
| 13 |
+
make_vcs_requirement_url,
|
| 14 |
+
vcs,
|
| 15 |
+
)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-310.pyc
ADDED
|
Binary file (3.5 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/git.cpython-310.pyc
ADDED
|
Binary file (12.4 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-310.pyc
ADDED
|
Binary file (5.06 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-310.pyc
ADDED
|
Binary file (8.46 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-310.pyc
ADDED
|
Binary file (21 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/bazaar.py
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from typing import List, Optional, Tuple
|
| 3 |
+
|
| 4 |
+
from pip._internal.utils.misc import HiddenText, display_path
|
| 5 |
+
from pip._internal.utils.subprocess import make_command
|
| 6 |
+
from pip._internal.utils.urls import path_to_url
|
| 7 |
+
from pip._internal.vcs.versioncontrol import (
|
| 8 |
+
AuthInfo,
|
| 9 |
+
RemoteNotFoundError,
|
| 10 |
+
RevOptions,
|
| 11 |
+
VersionControl,
|
| 12 |
+
vcs,
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
logger = logging.getLogger(__name__)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class Bazaar(VersionControl):
    """VersionControl backend for Bazaar (bzr) repositories."""

    name = "bzr"
    dirname = ".bzr"
    repo_name = "branch"
    schemes = (
        "bzr+http",
        "bzr+https",
        "bzr+ssh",
        "bzr+sftp",
        "bzr+ftp",
        "bzr+lp",
        "bzr+file",
    )

    @staticmethod
    def get_base_rev_args(rev: str) -> List[str]:
        # bzr selects a revision with "-r <rev>".
        return ["-r", rev]

    def fetch_new(
        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
    ) -> None:
        """Create a lightweight checkout of *url* at *dest*."""
        logger.info(
            "Checking out %s%s to %s",
            url,
            rev_options.to_display(),
            display_path(dest),
        )
        # Map pip's verbosity onto bzr's --quiet / (default) / -v... flags.
        if verbosity <= 0:
            verbosity_flags = ["--quiet"]
        elif verbosity == 1:
            verbosity_flags = []
        else:
            verbosity_flags = [f"-{'v'*verbosity}"]
        self.run_command(
            make_command(
                "checkout",
                "--lightweight",
                *verbosity_flags,
                rev_options.to_args(),
                url,
                dest,
            )
        )

    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """Point the existing checkout at *dest* to a different branch URL."""
        self.run_command(make_command("switch", url), cwd=dest)

    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """Update the checkout at *dest* to the requested revision."""
        info_output = self.run_command(
            make_command("info"), show_stdout=False, stdout_only=True, cwd=dest
        )
        if info_output.startswith("Standalone "):
            # Older versions of pip used to create standalone branches.
            # Convert the standalone branch to a checkout by calling "bzr bind".
            self.run_command(make_command("bind", "-q", url), cwd=dest)

        self.run_command(
            make_command("update", "-q", rev_options.to_args()), cwd=dest
        )

    @classmethod
    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
        # hotfix the URL scheme after removing bzr+ from bzr+ssh:// re-add it
        url, rev, user_pass = super().get_url_rev_and_auth(url)
        if url.startswith("ssh://"):
            url = "bzr+" + url
        return url, rev, user_pass

    @classmethod
    def get_remote_url(cls, location: str) -> str:
        """Return the remote URL reported by "bzr info" for *location*.

        Raises RemoteNotFoundError if no branch line is found.
        """
        info_output = cls.run_command(
            ["info"], show_stdout=False, stdout_only=True, cwd=location
        )
        for raw_line in info_output.splitlines():
            stripped = raw_line.strip()
            for prefix in ("checkout of branch: ", "parent branch: "):
                if stripped.startswith(prefix):
                    repo = stripped.split(prefix)[1]
                    if cls._is_local_repository(repo):
                        return path_to_url(repo)
                    return repo
        raise RemoteNotFoundError

    @classmethod
    def get_revision(cls, location: str) -> str:
        """Return the current revision number of the branch at *location*."""
        revno_output = cls.run_command(
            ["revno"],
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        )
        return revno_output.splitlines()[-1]

    @classmethod
    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
        """Always assume the versions don't match"""
        return False


vcs.register(Bazaar)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/git.py
ADDED
|
@@ -0,0 +1,527 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os.path
|
| 3 |
+
import pathlib
|
| 4 |
+
import re
|
| 5 |
+
import urllib.parse
|
| 6 |
+
import urllib.request
|
| 7 |
+
from dataclasses import replace
|
| 8 |
+
from typing import List, Optional, Tuple
|
| 9 |
+
|
| 10 |
+
from pip._internal.exceptions import BadCommand, InstallationError
|
| 11 |
+
from pip._internal.utils.misc import HiddenText, display_path, hide_url
|
| 12 |
+
from pip._internal.utils.subprocess import make_command
|
| 13 |
+
from pip._internal.vcs.versioncontrol import (
|
| 14 |
+
AuthInfo,
|
| 15 |
+
RemoteNotFoundError,
|
| 16 |
+
RemoteNotValidError,
|
| 17 |
+
RevOptions,
|
| 18 |
+
VersionControl,
|
| 19 |
+
find_path_to_project_root_from_repo_root,
|
| 20 |
+
vcs,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
urlsplit = urllib.parse.urlsplit
|
| 24 |
+
urlunsplit = urllib.parse.urlunsplit
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
logger = logging.getLogger(__name__)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
GIT_VERSION_REGEX = re.compile(
|
| 31 |
+
r"^git version " # Prefix.
|
| 32 |
+
r"(\d+)" # Major.
|
| 33 |
+
r"\.(\d+)" # Dot, minor.
|
| 34 |
+
r"(?:\.(\d+))?" # Optional dot, patch.
|
| 35 |
+
r".*$" # Suffix, including any pre- and post-release segments we don't care about.
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
+
# A full SHA-1 object name: exactly 40 hexadecimal characters.
HASH_REGEX = re.compile("^[a-fA-F0-9]{40}$")

# SCP (Secure copy protocol) shorthand. e.g. 'git@example.com:foo/bar.git'
SCP_REGEX = re.compile(
    r"""^
    # Optional user, e.g. 'git@'
    (\w+@)?
    # Server, e.g. 'github.com'.
    ([^/:]+):
    # The server-side path. e.g. 'user/project.git'. Must start with an
    # alphanumeric character so as not to be confusable with a Windows paths
    # like 'C:/foo/bar' or 'C:\foo\bar'.
    (\w[^:]*)
    $""",
    re.VERBOSE,
)


def looks_like_hash(sha: str) -> bool:
    """Return True if *sha* looks like a full 40-character hex SHA-1 hash."""
    return HASH_REGEX.match(sha) is not None
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class Git(VersionControl):
|
| 61 |
+
name = "git"
|
| 62 |
+
dirname = ".git"
|
| 63 |
+
repo_name = "clone"
|
| 64 |
+
schemes = (
|
| 65 |
+
"git+http",
|
| 66 |
+
"git+https",
|
| 67 |
+
"git+ssh",
|
| 68 |
+
"git+git",
|
| 69 |
+
"git+file",
|
| 70 |
+
)
|
| 71 |
+
# Prevent the user's environment variables from interfering with pip:
|
| 72 |
+
# https://github.com/pypa/pip/issues/1130
|
| 73 |
+
unset_environ = ("GIT_DIR", "GIT_WORK_TREE")
|
| 74 |
+
default_arg_rev = "HEAD"
|
| 75 |
+
|
| 76 |
+
@staticmethod
|
| 77 |
+
def get_base_rev_args(rev: str) -> List[str]:
|
| 78 |
+
return [rev]
|
| 79 |
+
|
| 80 |
+
def is_immutable_rev_checkout(self, url: str, dest: str) -> bool:
|
| 81 |
+
_, rev_options = self.get_url_rev_options(hide_url(url))
|
| 82 |
+
if not rev_options.rev:
|
| 83 |
+
return False
|
| 84 |
+
if not self.is_commit_id_equal(dest, rev_options.rev):
|
| 85 |
+
# the current commit is different from rev,
|
| 86 |
+
# which means rev was something else than a commit hash
|
| 87 |
+
return False
|
| 88 |
+
# return False in the rare case rev is both a commit hash
|
| 89 |
+
# and a tag or a branch; we don't want to cache in that case
|
| 90 |
+
# because that branch/tag could point to something else in the future
|
| 91 |
+
is_tag_or_branch = bool(self.get_revision_sha(dest, rev_options.rev)[0])
|
| 92 |
+
return not is_tag_or_branch
|
| 93 |
+
|
| 94 |
+
def get_git_version(self) -> Tuple[int, ...]:
|
| 95 |
+
version = self.run_command(
|
| 96 |
+
["version"],
|
| 97 |
+
command_desc="git version",
|
| 98 |
+
show_stdout=False,
|
| 99 |
+
stdout_only=True,
|
| 100 |
+
)
|
| 101 |
+
match = GIT_VERSION_REGEX.match(version)
|
| 102 |
+
if not match:
|
| 103 |
+
logger.warning("Can't parse git version: %s", version)
|
| 104 |
+
return ()
|
| 105 |
+
return (int(match.group(1)), int(match.group(2)))
|
| 106 |
+
|
| 107 |
+
@classmethod
|
| 108 |
+
def get_current_branch(cls, location: str) -> Optional[str]:
|
| 109 |
+
"""
|
| 110 |
+
Return the current branch, or None if HEAD isn't at a branch
|
| 111 |
+
(e.g. detached HEAD).
|
| 112 |
+
"""
|
| 113 |
+
# git-symbolic-ref exits with empty stdout if "HEAD" is a detached
|
| 114 |
+
# HEAD rather than a symbolic ref. In addition, the -q causes the
|
| 115 |
+
# command to exit with status code 1 instead of 128 in this case
|
| 116 |
+
# and to suppress the message to stderr.
|
| 117 |
+
args = ["symbolic-ref", "-q", "HEAD"]
|
| 118 |
+
output = cls.run_command(
|
| 119 |
+
args,
|
| 120 |
+
extra_ok_returncodes=(1,),
|
| 121 |
+
show_stdout=False,
|
| 122 |
+
stdout_only=True,
|
| 123 |
+
cwd=location,
|
| 124 |
+
)
|
| 125 |
+
ref = output.strip()
|
| 126 |
+
|
| 127 |
+
if ref.startswith("refs/heads/"):
|
| 128 |
+
return ref[len("refs/heads/") :]
|
| 129 |
+
|
| 130 |
+
return None
|
| 131 |
+
|
| 132 |
+
@classmethod
|
| 133 |
+
def get_revision_sha(cls, dest: str, rev: str) -> Tuple[Optional[str], bool]:
|
| 134 |
+
"""
|
| 135 |
+
Return (sha_or_none, is_branch), where sha_or_none is a commit hash
|
| 136 |
+
if the revision names a remote branch or tag, otherwise None.
|
| 137 |
+
|
| 138 |
+
Args:
|
| 139 |
+
dest: the repository directory.
|
| 140 |
+
rev: the revision name.
|
| 141 |
+
"""
|
| 142 |
+
# Pass rev to pre-filter the list.
|
| 143 |
+
output = cls.run_command(
|
| 144 |
+
["show-ref", rev],
|
| 145 |
+
cwd=dest,
|
| 146 |
+
show_stdout=False,
|
| 147 |
+
stdout_only=True,
|
| 148 |
+
on_returncode="ignore",
|
| 149 |
+
)
|
| 150 |
+
refs = {}
|
| 151 |
+
# NOTE: We do not use splitlines here since that would split on other
|
| 152 |
+
# unicode separators, which can be maliciously used to install a
|
| 153 |
+
# different revision.
|
| 154 |
+
for line in output.strip().split("\n"):
|
| 155 |
+
line = line.rstrip("\r")
|
| 156 |
+
if not line:
|
| 157 |
+
continue
|
| 158 |
+
try:
|
| 159 |
+
ref_sha, ref_name = line.split(" ", maxsplit=2)
|
| 160 |
+
except ValueError:
|
| 161 |
+
# Include the offending line to simplify troubleshooting if
|
| 162 |
+
# this error ever occurs.
|
| 163 |
+
raise ValueError(f"unexpected show-ref line: {line!r}")
|
| 164 |
+
|
| 165 |
+
refs[ref_name] = ref_sha
|
| 166 |
+
|
| 167 |
+
branch_ref = f"refs/remotes/origin/{rev}"
|
| 168 |
+
tag_ref = f"refs/tags/{rev}"
|
| 169 |
+
|
| 170 |
+
sha = refs.get(branch_ref)
|
| 171 |
+
if sha is not None:
|
| 172 |
+
return (sha, True)
|
| 173 |
+
|
| 174 |
+
sha = refs.get(tag_ref)
|
| 175 |
+
|
| 176 |
+
return (sha, False)
|
| 177 |
+
|
| 178 |
+
@classmethod
|
| 179 |
+
def _should_fetch(cls, dest: str, rev: str) -> bool:
|
| 180 |
+
"""
|
| 181 |
+
Return true if rev is a ref or is a commit that we don't have locally.
|
| 182 |
+
|
| 183 |
+
Branches and tags are not considered in this method because they are
|
| 184 |
+
assumed to be always available locally (which is a normal outcome of
|
| 185 |
+
``git clone`` and ``git fetch --tags``).
|
| 186 |
+
"""
|
| 187 |
+
if rev.startswith("refs/"):
|
| 188 |
+
# Always fetch remote refs.
|
| 189 |
+
return True
|
| 190 |
+
|
| 191 |
+
if not looks_like_hash(rev):
|
| 192 |
+
# Git fetch would fail with abbreviated commits.
|
| 193 |
+
return False
|
| 194 |
+
|
| 195 |
+
if cls.has_commit(dest, rev):
|
| 196 |
+
# Don't fetch if we have the commit locally.
|
| 197 |
+
return False
|
| 198 |
+
|
| 199 |
+
return True
|
| 200 |
+
|
| 201 |
+
@classmethod
|
| 202 |
+
def resolve_revision(
|
| 203 |
+
cls, dest: str, url: HiddenText, rev_options: RevOptions
|
| 204 |
+
) -> RevOptions:
|
| 205 |
+
"""
|
| 206 |
+
Resolve a revision to a new RevOptions object with the SHA1 of the
|
| 207 |
+
branch, tag, or ref if found.
|
| 208 |
+
|
| 209 |
+
Args:
|
| 210 |
+
rev_options: a RevOptions object.
|
| 211 |
+
"""
|
| 212 |
+
rev = rev_options.arg_rev
|
| 213 |
+
# The arg_rev property's implementation for Git ensures that the
|
| 214 |
+
# rev return value is always non-None.
|
| 215 |
+
assert rev is not None
|
| 216 |
+
|
| 217 |
+
sha, is_branch = cls.get_revision_sha(dest, rev)
|
| 218 |
+
|
| 219 |
+
if sha is not None:
|
| 220 |
+
rev_options = rev_options.make_new(sha)
|
| 221 |
+
rev_options = replace(rev_options, branch_name=(rev if is_branch else None))
|
| 222 |
+
|
| 223 |
+
return rev_options
|
| 224 |
+
|
| 225 |
+
# Do not show a warning for the common case of something that has
|
| 226 |
+
# the form of a Git commit hash.
|
| 227 |
+
if not looks_like_hash(rev):
|
| 228 |
+
logger.warning(
|
| 229 |
+
"Did not find branch or tag '%s', assuming revision or ref.",
|
| 230 |
+
rev,
|
| 231 |
+
)
|
| 232 |
+
|
| 233 |
+
if not cls._should_fetch(dest, rev):
|
| 234 |
+
return rev_options
|
| 235 |
+
|
| 236 |
+
# fetch the requested revision
|
| 237 |
+
cls.run_command(
|
| 238 |
+
make_command("fetch", "-q", url, rev_options.to_args()),
|
| 239 |
+
cwd=dest,
|
| 240 |
+
)
|
| 241 |
+
# Change the revision to the SHA of the ref we fetched
|
| 242 |
+
sha = cls.get_revision(dest, rev="FETCH_HEAD")
|
| 243 |
+
rev_options = rev_options.make_new(sha)
|
| 244 |
+
|
| 245 |
+
return rev_options
|
| 246 |
+
|
| 247 |
+
@classmethod
|
| 248 |
+
def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
|
| 249 |
+
"""
|
| 250 |
+
Return whether the current commit hash equals the given name.
|
| 251 |
+
|
| 252 |
+
Args:
|
| 253 |
+
dest: the repository directory.
|
| 254 |
+
name: a string name.
|
| 255 |
+
"""
|
| 256 |
+
if not name:
|
| 257 |
+
# Then avoid an unnecessary subprocess call.
|
| 258 |
+
return False
|
| 259 |
+
|
| 260 |
+
return cls.get_revision(dest) == name
|
| 261 |
+
|
| 262 |
+
def fetch_new(
|
| 263 |
+
self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
|
| 264 |
+
) -> None:
|
| 265 |
+
rev_display = rev_options.to_display()
|
| 266 |
+
logger.info("Cloning %s%s to %s", url, rev_display, display_path(dest))
|
| 267 |
+
if verbosity <= 0:
|
| 268 |
+
flags: Tuple[str, ...] = ("--quiet",)
|
| 269 |
+
elif verbosity == 1:
|
| 270 |
+
flags = ()
|
| 271 |
+
else:
|
| 272 |
+
flags = ("--verbose", "--progress")
|
| 273 |
+
if self.get_git_version() >= (2, 17):
|
| 274 |
+
# Git added support for partial clone in 2.17
|
| 275 |
+
# https://git-scm.com/docs/partial-clone
|
| 276 |
+
# Speeds up cloning by functioning without a complete copy of repository
|
| 277 |
+
self.run_command(
|
| 278 |
+
make_command(
|
| 279 |
+
"clone",
|
| 280 |
+
"--filter=blob:none",
|
| 281 |
+
*flags,
|
| 282 |
+
url,
|
| 283 |
+
dest,
|
| 284 |
+
)
|
| 285 |
+
)
|
| 286 |
+
else:
|
| 287 |
+
self.run_command(make_command("clone", *flags, url, dest))
|
| 288 |
+
|
| 289 |
+
if rev_options.rev:
|
| 290 |
+
# Then a specific revision was requested.
|
| 291 |
+
rev_options = self.resolve_revision(dest, url, rev_options)
|
| 292 |
+
branch_name = getattr(rev_options, "branch_name", None)
|
| 293 |
+
logger.debug("Rev options %s, branch_name %s", rev_options, branch_name)
|
| 294 |
+
if branch_name is None:
|
| 295 |
+
# Only do a checkout if the current commit id doesn't match
|
| 296 |
+
# the requested revision.
|
| 297 |
+
if not self.is_commit_id_equal(dest, rev_options.rev):
|
| 298 |
+
cmd_args = make_command(
|
| 299 |
+
"checkout",
|
| 300 |
+
"-q",
|
| 301 |
+
rev_options.to_args(),
|
| 302 |
+
)
|
| 303 |
+
self.run_command(cmd_args, cwd=dest)
|
| 304 |
+
elif self.get_current_branch(dest) != branch_name:
|
| 305 |
+
# Then a specific branch was requested, and that branch
|
| 306 |
+
# is not yet checked out.
|
| 307 |
+
track_branch = f"origin/{branch_name}"
|
| 308 |
+
cmd_args = [
|
| 309 |
+
"checkout",
|
| 310 |
+
"-b",
|
| 311 |
+
branch_name,
|
| 312 |
+
"--track",
|
| 313 |
+
track_branch,
|
| 314 |
+
]
|
| 315 |
+
self.run_command(cmd_args, cwd=dest)
|
| 316 |
+
else:
|
| 317 |
+
sha = self.get_revision(dest)
|
| 318 |
+
rev_options = rev_options.make_new(sha)
|
| 319 |
+
|
| 320 |
+
logger.info("Resolved %s to commit %s", url, rev_options.rev)
|
| 321 |
+
|
| 322 |
+
#: repo may contain submodules
|
| 323 |
+
self.update_submodules(dest)
|
| 324 |
+
|
| 325 |
+
def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
|
| 326 |
+
self.run_command(
|
| 327 |
+
make_command("config", "remote.origin.url", url),
|
| 328 |
+
cwd=dest,
|
| 329 |
+
)
|
| 330 |
+
cmd_args = make_command("checkout", "-q", rev_options.to_args())
|
| 331 |
+
self.run_command(cmd_args, cwd=dest)
|
| 332 |
+
|
| 333 |
+
self.update_submodules(dest)
|
| 334 |
+
|
| 335 |
+
def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
|
| 336 |
+
# First fetch changes from the default remote
|
| 337 |
+
if self.get_git_version() >= (1, 9):
|
| 338 |
+
# fetch tags in addition to everything else
|
| 339 |
+
self.run_command(["fetch", "-q", "--tags"], cwd=dest)
|
| 340 |
+
else:
|
| 341 |
+
self.run_command(["fetch", "-q"], cwd=dest)
|
| 342 |
+
# Then reset to wanted revision (maybe even origin/master)
|
| 343 |
+
rev_options = self.resolve_revision(dest, url, rev_options)
|
| 344 |
+
cmd_args = make_command("reset", "--hard", "-q", rev_options.to_args())
|
| 345 |
+
self.run_command(cmd_args, cwd=dest)
|
| 346 |
+
#: update submodules
|
| 347 |
+
self.update_submodules(dest)
|
| 348 |
+
|
| 349 |
+
@classmethod
def get_remote_url(cls, location: str) -> str:
    """
    Return URL of the first remote encountered, preferring "origin".

    Raises RemoteNotFoundError if the repository does not have a remote
    url configured.
    """
    # "git config --get-regexp" exits with return code 1 when nothing
    # matches, which is an expected outcome here rather than an error.
    output = cls.run_command(
        ["config", "--get-regexp", r"remote\..*\.url"],
        extra_ok_returncodes=(1,),
        show_stdout=False,
        stdout_only=True,
        cwd=location,
    )
    remotes = output.splitlines()
    if not remotes:
        raise RemoteNotFoundError
    # Prefer origin when present; otherwise fall back to the first remote.
    found_remote = next(
        (line for line in remotes if line.startswith("remote.origin.url ")),
        remotes[0],
    )
    # Each line has the form "remote.<name>.url <url>".
    url = found_remote.split(" ")[1]
    return cls._git_remote_to_pip_url(url.strip())
|
| 378 |
+
|
| 379 |
+
@staticmethod
def _git_remote_to_pip_url(url: str) -> str:
    """
    Convert a remote url from what git uses to what pip accepts.

    There are 3 legal forms **url** may take:

        1. A fully qualified url: ssh://git@example.com/foo/bar.git
        2. A local project.git folder: /path/to/bare/repository.git
        3. SCP shorthand for form 1: git@example.com:foo/bar.git

    Form 1 is returned unchanged, form 2 is converted to a file:// URI,
    and form 3 is rewritten into form 1.

    See the corresponding test test_git_remote_url_to_pip() for examples of
    sample inputs/outputs.
    """
    if re.match(r"\w+://", url):
        # Already carries a scheme; valid as-is.
        return url
    if os.path.exists(url):
        # A local bare remote (git clone --mirror): needs a file:// prefix.
        return pathlib.PurePath(url).as_uri()
    scp_form = SCP_REGEX.match(url)
    if scp_form:
        # SCP shorthand: prepend ssh:// and replace the ':' with a '/'.
        return scp_form.expand(r"ssh://\1\2/\3")
    # None of the recognized forms matched; bail out.
    raise RemoteNotValidError(url)
|
| 409 |
+
|
| 410 |
+
@classmethod
def has_commit(cls, location: str, rev: str) -> bool:
    """
    Check if rev is a commit that is available in the local repository.
    """
    # "rev-parse -q --verify" exits non-zero (surfacing here as an
    # InstallationError from run_command) when the object cannot be
    # resolved in the local repository.
    # NOTE(review): the literal "sha^" prefix looks unusual for rev-parse
    # syntax -- confirm against git-rev-parse documentation that this is
    # the intended "is this commit present" probe.
    try:
        cls.run_command(
            ["rev-parse", "-q", "--verify", "sha^" + rev],
            cwd=location,
            log_failed_cmd=False,
        )
    except InstallationError:
        return False
    else:
        return True
|
| 425 |
+
|
| 426 |
+
@classmethod
def get_revision(cls, location: str, rev: Optional[str] = None) -> str:
    """Return the commit that *rev* (default: HEAD) resolves to."""
    target = "HEAD" if rev is None else rev
    output = cls.run_command(
        ["rev-parse", target],
        show_stdout=False,
        stdout_only=True,
        cwd=location,
    )
    return output.strip()
|
| 437 |
+
|
| 438 |
+
@classmethod
def get_subdirectory(cls, location: str) -> Optional[str]:
    """
    Return the path to Python project root, relative to the repo root.
    Return None if the project root is in the repo root.
    """
    # find the repo root: ask git for the .git directory, which may be
    # reported relative to `location`.
    git_dir = cls.run_command(
        ["rev-parse", "--git-dir"],
        show_stdout=False,
        stdout_only=True,
        cwd=location,
    ).strip()
    if not os.path.isabs(git_dir):
        git_dir = os.path.join(location, git_dir)
    # The working-tree root is the parent of the .git directory.
    repo_root = os.path.abspath(os.path.join(git_dir, ".."))
    return find_path_to_project_root_from_repo_root(location, repo_root)
|
| 455 |
+
|
| 456 |
+
@classmethod
def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
    """
    Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
    That's required because although they use SSH they sometimes don't
    work with a ssh:// scheme (e.g. GitHub). But we need a scheme for
    parsing. Hence we remove it again afterwards and return it as a stub.
    """
    # Works around an apparent Git bug
    # (see https://article.gmane.org/gmane.comp.version-control.git/146500)
    scheme, netloc, path, query, fragment = urlsplit(url)
    if scheme.endswith("file"):
        # Normalize a (possibly Windows-style) local path back into the
        # URL while preserving the original run of leading slashes.
        initial_slashes = path[: -len(path.lstrip("/"))]
        newpath = initial_slashes + urllib.request.url2pathname(path).replace(
            "\\", "/"
        ).lstrip("/")
        # Keep any "git+"-style prefix in front of the rebuilt URL.
        after_plus = scheme.find("+") + 1
        url = scheme[:after_plus] + urlunsplit(
            (scheme[after_plus:], netloc, newpath, query, fragment),
        )

    if "://" not in url:
        # Stub SCP-like URL: borrow an ssh:// scheme just for parsing,
        # then strip it off again below.
        assert "file:" not in url
        url = url.replace("git+", "git+ssh://")
        url, rev, user_pass = super().get_url_rev_and_auth(url)
        url = url.replace("ssh://", "")
    else:
        url, rev, user_pass = super().get_url_rev_and_auth(url)

    return url, rev, user_pass
|
| 486 |
+
|
| 487 |
+
@classmethod
def update_submodules(cls, location: str) -> None:
    """Initialize and update submodules, if the checkout declares any."""
    # A checkout without .gitmodules has nothing to update.
    gitmodules = os.path.join(location, ".gitmodules")
    if not os.path.exists(gitmodules):
        return
    cls.run_command(
        ["submodule", "update", "--init", "--recursive", "-q"],
        cwd=location,
    )
|
| 495 |
+
|
| 496 |
+
@classmethod
def get_repository_root(cls, location: str) -> Optional[str]:
    """Return the top level of the git working tree containing *location*.

    Returns None when git is unavailable or *location* is not inside a
    git repository.
    """
    root = super().get_repository_root(location)
    if root:
        return root
    try:
        toplevel = cls.run_command(
            ["rev-parse", "--show-toplevel"],
            cwd=location,
            show_stdout=False,
            stdout_only=True,
            on_returncode="raise",
            log_failed_cmd=False,
        )
    except BadCommand:
        # git itself is missing, so we cannot tell either way.
        logger.debug(
            "could not determine if %s is under git control "
            "because git is not available",
            location,
        )
        return None
    except InstallationError:
        # rev-parse exited non-zero: not inside a git working tree.
        return None
    return os.path.normpath(toplevel.rstrip("\r\n"))
|
| 520 |
+
|
| 521 |
+
@staticmethod
def should_add_vcs_url_prefix(repo_url: str) -> bool:
    """In either https or ssh form, requirements must be prefixed with git+."""
    # Unconditional: every git requirement URL needs the "git+" prefix.
    return True
|
| 525 |
+
|
| 526 |
+
|
| 527 |
+
vcs.register(Git)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/mercurial.py
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import configparser
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
from typing import List, Optional, Tuple
|
| 5 |
+
|
| 6 |
+
from pip._internal.exceptions import BadCommand, InstallationError
|
| 7 |
+
from pip._internal.utils.misc import HiddenText, display_path
|
| 8 |
+
from pip._internal.utils.subprocess import make_command
|
| 9 |
+
from pip._internal.utils.urls import path_to_url
|
| 10 |
+
from pip._internal.vcs.versioncontrol import (
|
| 11 |
+
RevOptions,
|
| 12 |
+
VersionControl,
|
| 13 |
+
find_path_to_project_root_from_repo_root,
|
| 14 |
+
vcs,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
logger = logging.getLogger(__name__)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class Mercurial(VersionControl):
    """Support for Mercurial ("hg") repositories."""

    name = "hg"
    dirname = ".hg"
    repo_name = "clone"
    schemes = (
        "hg+file",
        "hg+http",
        "hg+https",
        "hg+ssh",
        "hg+static-http",
    )

    @staticmethod
    def get_base_rev_args(rev: str) -> List[str]:
        """Return the hg arguments that select revision *rev*."""
        return [f"--rev={rev}"]

    def fetch_new(
        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
    ) -> None:
        """Clone *url* into *dest* and update to the requested revision."""
        logger.info(
            "Cloning hg %s%s to %s",
            url,
            rev_options.to_display(),
            display_path(dest),
        )
        # Map pip's verbosity level onto hg's output flags.
        verbosity_flags: Tuple[str, ...]
        if verbosity <= 0:
            verbosity_flags = ("--quiet",)
        elif verbosity == 1:
            verbosity_flags = ()
        elif verbosity == 2:
            verbosity_flags = ("--verbose",)
        else:
            verbosity_flags = ("--verbose", "--debug")
        # Clone without populating the working copy, then update to the
        # requested revision in a second step.
        self.run_command(
            make_command("clone", "--noupdate", *verbosity_flags, url, dest)
        )
        self.run_command(
            make_command("update", *verbosity_flags, rev_options.to_args()),
            cwd=dest,
        )

    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """Rewrite the default path in the repo's hgrc, then update."""
        hgrc_path = os.path.join(dest, self.dirname, "hgrc")
        parser = configparser.RawConfigParser()
        try:
            parser.read(hgrc_path)
            parser.set("paths", "default", url.secret)
            with open(hgrc_path, "w") as hgrc_file:
                parser.write(hgrc_file)
        except (OSError, configparser.NoSectionError) as exc:
            # Leave the checkout alone if the config cannot be rewritten.
            logger.warning("Could not switch Mercurial repository to %s: %s", url, exc)
        else:
            self.run_command(
                make_command("update", "-q", rev_options.to_args()),
                cwd=dest,
            )

    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """Pull new changesets and update to the requested revision."""
        self.run_command(["pull", "-q"], cwd=dest)
        self.run_command(
            make_command("update", "-q", rev_options.to_args()),
            cwd=dest,
        )

    @classmethod
    def get_remote_url(cls, location: str) -> str:
        """Return the repo's default remote path as a pip-compatible URL."""
        url = cls.run_command(
            ["showconfig", "paths.default"],
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        ).strip()
        # Local filesystem paths are converted to file:// URLs.
        if cls._is_local_repository(url):
            url = path_to_url(url)
        return url.strip()

    @classmethod
    def get_revision(cls, location: str) -> str:
        """
        Return the repository-local changeset revision number, as an integer.
        """
        return cls.run_command(
            ["parents", "--template={rev}"],
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        ).strip()

    @classmethod
    def get_requirement_revision(cls, location: str) -> str:
        """
        Return the changeset identification hash, as a 40-character
        hexadecimal string
        """
        return cls.run_command(
            ["parents", "--template={node}"],
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        ).strip()

    @classmethod
    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
        """Always assume the versions don't match"""
        return False

    @classmethod
    def get_subdirectory(cls, location: str) -> Optional[str]:
        """
        Return the path to Python project root, relative to the repo root.
        Return None if the project root is in the repo root.
        """
        # "hg root" prints the repository root, possibly as a relative path.
        root = cls.run_command(
            ["root"], show_stdout=False, stdout_only=True, cwd=location
        ).strip()
        if not os.path.isabs(root):
            root = os.path.abspath(os.path.join(location, root))
        return find_path_to_project_root_from_repo_root(location, root)

    @classmethod
    def get_repository_root(cls, location: str) -> Optional[str]:
        """Return the root of the hg repository containing *location*, if any."""
        found = super().get_repository_root(location)
        if found:
            return found
        try:
            output = cls.run_command(
                ["root"],
                cwd=location,
                show_stdout=False,
                stdout_only=True,
                on_returncode="raise",
                log_failed_cmd=False,
            )
        except BadCommand:
            # The hg executable is not installed.
            logger.debug(
                "could not determine if %s is under hg control "
                "because hg is not available",
                location,
            )
            return None
        except InstallationError:
            # "hg root" failed: not inside an hg repository.
            return None
        return os.path.normpath(output.rstrip("\r\n"))
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
vcs.register(Mercurial)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/subversion.py
ADDED
|
@@ -0,0 +1,324 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os
|
| 3 |
+
import re
|
| 4 |
+
from typing import List, Optional, Tuple
|
| 5 |
+
|
| 6 |
+
from pip._internal.utils.misc import (
|
| 7 |
+
HiddenText,
|
| 8 |
+
display_path,
|
| 9 |
+
is_console_interactive,
|
| 10 |
+
is_installable_dir,
|
| 11 |
+
split_auth_from_netloc,
|
| 12 |
+
)
|
| 13 |
+
from pip._internal.utils.subprocess import CommandArgs, make_command
|
| 14 |
+
from pip._internal.vcs.versioncontrol import (
|
| 15 |
+
AuthInfo,
|
| 16 |
+
RemoteNotFoundError,
|
| 17 |
+
RevOptions,
|
| 18 |
+
VersionControl,
|
| 19 |
+
vcs,
|
| 20 |
+
)
|
| 21 |
+
|
| 22 |
+
logger = logging.getLogger(__name__)
|
| 23 |
+
|
| 24 |
+
_svn_xml_url_re = re.compile('url="([^"]+)"')
|
| 25 |
+
_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
|
| 26 |
+
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
|
| 27 |
+
_svn_info_xml_url_re = re.compile(r"<url>(.*)</url>")
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class Subversion(VersionControl):
    """Support for Subversion ("svn") repositories."""

    name = "svn"
    dirname = ".svn"
    repo_name = "checkout"
    schemes = ("svn+ssh", "svn+http", "svn+https", "svn+svn", "svn+file")

    @classmethod
    def should_add_vcs_url_prefix(cls, remote_url: str) -> bool:
        # Subversion requirement URLs always need the "svn+" prefix.
        return True

    @staticmethod
    def get_base_rev_args(rev: str) -> List[str]:
        # svn selects revisions with "-r <rev>".
        return ["-r", rev]

    @classmethod
    def get_revision(cls, location: str) -> str:
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0

        for base, dirs, _ in os.walk(location):
            if cls.dirname not in dirs:
                dirs[:] = []
                continue  # no sense walking uncontrolled subdirs
            dirs.remove(cls.dirname)
            entries_fn = os.path.join(base, cls.dirname, "entries")
            if not os.path.exists(entries_fn):
                # FIXME: should we warn?
                continue

            dirurl, localrev = cls._get_svn_url_rev(base)

            if base == location:
                assert dirurl is not None
                base = dirurl + "/"  # save the root url
            elif not dirurl or not dirurl.startswith(base):
                dirs[:] = []
                continue  # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return str(revision)

    @classmethod
    def get_netloc_and_auth(
        cls, netloc: str, scheme: str
    ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]:
        """
        This override allows the auth information to be passed to svn via the
        --username and --password options instead of via the URL.
        """
        if scheme == "ssh":
            # The --username and --password options can't be used for
            # svn+ssh URLs, so keep the auth information in the URL.
            return super().get_netloc_and_auth(netloc, scheme)

        return split_auth_from_netloc(netloc)

    @classmethod
    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
        # hotfix the URL scheme after removing svn+ from svn+ssh:// re-add it
        url, rev, user_pass = super().get_url_rev_and_auth(url)
        if url.startswith("ssh://"):
            url = "svn+" + url
        return url, rev, user_pass

    @staticmethod
    def make_rev_args(
        username: Optional[str], password: Optional[HiddenText]
    ) -> CommandArgs:
        # Translate credentials into svn command-line options.
        extra_args: CommandArgs = []
        if username:
            extra_args += ["--username", username]
        if password:
            extra_args += ["--password", password]

        return extra_args

    @classmethod
    def get_remote_url(cls, location: str) -> str:
        """Return the remote URL for the svn checkout rooted at *location*."""
        # In cases where the source is in a subdirectory, we have to look up in
        # the location until we find a valid project root.
        orig_location = location
        while not is_installable_dir(location):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding a Python project.
                logger.warning(
                    "Could not find Python project for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                raise RemoteNotFoundError

        url, _rev = cls._get_svn_url_rev(location)
        if url is None:
            raise RemoteNotFoundError

        return url

    @classmethod
    def _get_svn_url_rev(cls, location: str) -> Tuple[Optional[str], int]:
        """Return (url, max revision) parsed from the checkout's metadata.

        Handles three metadata formats: the pre-1.7 plain-text "entries"
        file, the older XML "entries" file, and (for svn >= 1.7) the output
        of "svn info --xml".
        """
        from pip._internal.exceptions import InstallationError

        entries_path = os.path.join(location, cls.dirname, "entries")
        if os.path.exists(entries_path):
            with open(entries_path) as f:
                data = f.read()
        else:  # subversion >= 1.7 does not have the 'entries' file
            data = ""

        url = None
        if data.startswith("8") or data.startswith("9") or data.startswith("10"):
            # Plain-text entries format: records separated by "\n\x0c\n",
            # fields within a record separated by newlines.
            entries = list(map(str.splitlines, data.split("\n\x0c\n")))
            del entries[0][0]  # get rid of the '8'
            url = entries[0][3]
            # Field 9 of each record holds the committed revision, if set.
            revs = [int(d[9]) for d in entries if len(d) > 9 and d[9]] + [0]
        elif data.startswith("<?xml"):
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError(f"Badly formatted data: {data!r}")
            url = match.group(1)  # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                # Note that using get_remote_call_options is not necessary here
                # because `svn info` is being run against a local directory.
                # We don't need to worry about making sure interactive mode
                # is being used to prompt for passwords, because passwords
                # are only potentially needed for remote server requests.
                xml = cls.run_command(
                    ["info", "--xml", location],
                    show_stdout=False,
                    stdout_only=True,
                )
                match = _svn_info_xml_url_re.search(xml)
                assert match is not None
                url = match.group(1)
                revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]
            except InstallationError:
                url, revs = None, []

        if revs:
            rev = max(revs)
        else:
            rev = 0

        return url, rev

    @classmethod
    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
        """Always assume the versions don't match"""
        return False

    def __init__(self, use_interactive: Optional[bool] = None) -> None:
        # Default to interactive mode only when attached to a console.
        if use_interactive is None:
            use_interactive = is_console_interactive()
        self.use_interactive = use_interactive

        # This member is used to cache the fetched version of the current
        # ``svn`` client.
        # Special value definitions:
        #   None: Not evaluated yet.
        #   Empty tuple: Could not parse version.
        self._vcs_version: Optional[Tuple[int, ...]] = None

        super().__init__()

    def call_vcs_version(self) -> Tuple[int, ...]:
        """Query the version of the currently installed Subversion client.

        :return: A tuple containing the parts of the version information or
            ``()`` if the version returned from ``svn`` could not be parsed.
        :raises: BadCommand: If ``svn`` is not installed.
        """
        # Example versions:
        #   svn, version 1.10.3 (r1842928)
        #      compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0
        #   svn, version 1.7.14 (r1542130)
        #      compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu
        #   svn, version 1.12.0-SlikSvn (SlikSvn/1.12.0)
        #      compiled May 28 2019, 13:44:56 on x86_64-microsoft-windows6.2
        version_prefix = "svn, version "
        version = self.run_command(["--version"], show_stdout=False, stdout_only=True)
        if not version.startswith(version_prefix):
            return ()

        version = version[len(version_prefix) :].split()[0]
        # Drop any "-vendor" suffix before splitting on dots.
        version_list = version.partition("-")[0].split(".")
        try:
            parsed_version = tuple(map(int, version_list))
        except ValueError:
            return ()

        return parsed_version

    def get_vcs_version(self) -> Tuple[int, ...]:
        """Return the version of the currently installed Subversion client.

        If the version of the Subversion client has already been queried,
        a cached value will be used.

        :return: A tuple containing the parts of the version information or
            ``()`` if the version returned from ``svn`` could not be parsed.
        :raises: BadCommand: If ``svn`` is not installed.
        """
        if self._vcs_version is not None:
            # Use cached version, if available.
            # If parsing the version failed previously (empty tuple),
            # do not attempt to parse it again.
            return self._vcs_version

        vcs_version = self.call_vcs_version()
        self._vcs_version = vcs_version
        return vcs_version

    def get_remote_call_options(self) -> CommandArgs:
        """Return options to be used on calls to Subversion that contact the server.

        These options are applicable for the following ``svn`` subcommands used
        in this class.

            - checkout
            - switch
            - update

        :return: A list of command line arguments to pass to ``svn``.
        """
        if not self.use_interactive:
            # --non-interactive switch is available since Subversion 0.14.4.
            # Subversion < 1.8 runs in interactive mode by default.
            return ["--non-interactive"]

        svn_version = self.get_vcs_version()
        # By default, Subversion >= 1.8 runs in non-interactive mode if
        # stdin is not a TTY. Since that is how pip invokes SVN, in
        # call_subprocess(), pip must pass --force-interactive to ensure
        # the user can be prompted for a password, if required.
        # SVN added the --force-interactive option in SVN 1.8. Since
        # e.g. RHEL/CentOS 7, which is supported until 2024, ships with
        # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip
        # can't safely add the option if the SVN version is < 1.8 (or unknown).
        if svn_version >= (1, 8):
            return ["--force-interactive"]

        return []

    def fetch_new(
        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
    ) -> None:
        """Check out *url* into *dest* at the requested revision."""
        rev_display = rev_options.to_display()
        logger.info(
            "Checking out %s%s to %s",
            url,
            rev_display,
            display_path(dest),
        )
        if verbosity <= 0:
            flags = ["--quiet"]
        else:
            flags = []
        cmd_args = make_command(
            "checkout",
            *flags,
            self.get_remote_call_options(),
            rev_options.to_args(),
            url,
            dest,
        )
        self.run_command(cmd_args)

    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """Switch the existing checkout at *dest* to *url* at the given revision."""
        cmd_args = make_command(
            "switch",
            self.get_remote_call_options(),
            rev_options.to_args(),
            url,
            dest,
        )
        self.run_command(cmd_args)

    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """Update the checkout at *dest* to the requested revision."""
        cmd_args = make_command(
            "update",
            self.get_remote_call_options(),
            rev_options.to_args(),
            dest,
        )
        self.run_command(cmd_args)
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
vcs.register(Subversion)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/vcs/versioncontrol.py
ADDED
|
@@ -0,0 +1,688 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Handles all VCS (version control) support"""
|
| 2 |
+
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
import shutil
|
| 6 |
+
import sys
|
| 7 |
+
import urllib.parse
|
| 8 |
+
from dataclasses import dataclass, field
|
| 9 |
+
from typing import (
|
| 10 |
+
Any,
|
| 11 |
+
Dict,
|
| 12 |
+
Iterable,
|
| 13 |
+
Iterator,
|
| 14 |
+
List,
|
| 15 |
+
Literal,
|
| 16 |
+
Mapping,
|
| 17 |
+
Optional,
|
| 18 |
+
Tuple,
|
| 19 |
+
Type,
|
| 20 |
+
Union,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
from pip._internal.cli.spinners import SpinnerInterface
|
| 24 |
+
from pip._internal.exceptions import BadCommand, InstallationError
|
| 25 |
+
from pip._internal.utils.misc import (
|
| 26 |
+
HiddenText,
|
| 27 |
+
ask_path_exists,
|
| 28 |
+
backup_dir,
|
| 29 |
+
display_path,
|
| 30 |
+
hide_url,
|
| 31 |
+
hide_value,
|
| 32 |
+
is_installable_dir,
|
| 33 |
+
rmtree,
|
| 34 |
+
)
|
| 35 |
+
from pip._internal.utils.subprocess import (
|
| 36 |
+
CommandArgs,
|
| 37 |
+
call_subprocess,
|
| 38 |
+
format_command_args,
|
| 39 |
+
make_command,
|
| 40 |
+
)
|
| 41 |
+
|
| 42 |
+
__all__ = ["vcs"]
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
logger = logging.getLogger(__name__)
|
| 46 |
+
|
| 47 |
+
AuthInfo = Tuple[Optional[str], Optional[str]]
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def is_url(name: str) -> bool:
    """
    Return true if the name looks like a URL.
    """
    # A bare path has no scheme; anything else must use a scheme we know
    # (plain transfer protocols or one of the registered VCS schemes).
    parsed_scheme = urllib.parse.urlsplit(name).scheme
    if not parsed_scheme:
        return False
    known_schemes = ["http", "https", "file", "ftp"] + vcs.all_schemes
    return parsed_scheme in known_schemes
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def make_vcs_requirement_url(
    repo_url: str, rev: str, project_name: str, subdir: Optional[str] = None
) -> str:
    """
    Return the URL for a VCS requirement.

    Args:
        repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
        rev: the revision to pin after "@".
        project_name: the (unescaped) project name.
        subdir: optional subdirectory within the repository holding the project.
    """
    # Egg fragments use underscores where the project name has dashes.
    egg_name = project_name.replace("-", "_")
    parts = [f"{repo_url}@{rev}#egg={egg_name}"]
    if subdir:
        parts.append(f"&subdirectory={subdir}")
    return "".join(parts)
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def find_path_to_project_root_from_repo_root(
    location: str, repo_root: str
) -> Optional[str]:
    """
    Find the Python project's root by searching up the filesystem from
    `location`. Return the path to project root relative to `repo_root`.
    Return None if the project root is `repo_root`, or cannot be found.
    """
    # Walk upward until is_installable_dir() accepts a directory, or we
    # run out of parents (dirname() becomes a fixed point at the root).
    orig_location = location
    while not is_installable_dir(location):
        last_location = location
        location = os.path.dirname(location)
        if location == last_location:
            # We've traversed up to the root of the filesystem without
            # finding a Python project.
            logger.warning(
                "Could not find a Python project for directory %s (tried all "
                "parent directories)",
                orig_location,
            )
            return None

    if os.path.samefile(repo_root, location):
        # The project root is the repo root itself: no subdirectory needed.
        return None

    return os.path.relpath(location, repo_root)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
class RemoteNotFoundError(Exception):
    """Raised when a repository has no remote URL configured."""
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
class RemoteNotValidError(Exception):
    """Raised for a remote URL that is present but not valid."""

    def __init__(self, url: str):
        # Keep the offending URL both in the exception args (for str()) and
        # as an attribute for programmatic access.
        super().__init__(url)
        self.url = url
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
@dataclass(frozen=True)
class RevOptions:
    """
    Encapsulates a VCS-specific revision to install, along with any VCS
    install options.

    Args:
        vc_class: a VersionControl subclass.
        rev: the name of the revision to install.
        extra_args: a list of extra options.
    """

    vc_class: Type["VersionControl"]
    rev: Optional[str] = None
    extra_args: CommandArgs = field(default_factory=list)
    branch_name: Optional[str] = None

    def __repr__(self) -> str:
        return f"<RevOptions {self.vc_class.name}: rev={self.rev!r}>"

    @property
    def arg_rev(self) -> Optional[str]:
        # Fall back to the backend's default when no explicit rev was given.
        if self.rev is None:
            return self.vc_class.default_arg_rev
        return self.rev

    def to_args(self) -> CommandArgs:
        """
        Return the VCS-specific command arguments.
        """
        resolved = self.arg_rev
        command_args: CommandArgs = []
        if resolved is not None:
            command_args += self.vc_class.get_base_rev_args(resolved)
        command_args += self.extra_args
        return command_args

    def to_display(self) -> str:
        # Human-readable suffix for log messages; empty when no rev is set.
        return f" (to revision {self.rev})" if self.rev else ""

    def make_new(self, rev: str) -> "RevOptions":
        """
        Make a copy of the current instance, but with a new rev.

        Args:
            rev: the name of the revision for the new object.
        """
        return self.vc_class.make_rev_options(rev, extra_args=self.extra_args)
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
class VcsSupport:
    """Registry of the available VersionControl backends."""

    # Shared, class-level mapping of backend name -> backend instance.
    _registry: Dict[str, "VersionControl"] = {}
    schemes = ["ssh", "git", "hg", "bzr", "sftp", "svn"]

    def __init__(self) -> None:
        # Teach urllib.parse that these schemes carry a netloc so VCS URLs
        # split correctly.
        urllib.parse.uses_netloc.extend(self.schemes)
        super().__init__()

    def __iter__(self) -> Iterator[str]:
        return iter(self._registry)

    @property
    def backends(self) -> List["VersionControl"]:
        return list(self._registry.values())

    @property
    def dirnames(self) -> List[str]:
        return [backend.dirname for backend in self.backends]

    @property
    def all_schemes(self) -> List[str]:
        collected: List[str] = []
        for backend in self.backends:
            collected.extend(backend.schemes)
        return collected

    def register(self, cls: Type["VersionControl"]) -> None:
        # Refuse classes without a "name" attribute; first registration wins.
        if not hasattr(cls, "name"):
            logger.warning("Cannot register VCS %s", cls.__name__)
            return
        if cls.name not in self._registry:
            self._registry[cls.name] = cls()
            logger.debug("Registered VCS backend: %s", cls.name)

    def unregister(self, name: str) -> None:
        # Removing an unknown name is a no-op.
        self._registry.pop(name, None)

    def get_backend_for_dir(self, location: str) -> Optional["VersionControl"]:
        """
        Return a VersionControl object if a repository of that type is found
        at the given directory.
        """
        candidates: Dict[str, "VersionControl"] = {}
        for backend in self._registry.values():
            root = backend.get_repository_root(location)
            if not root:
                continue
            logger.debug("Determine that %s uses VCS: %s", location, backend.name)
            candidates[root] = backend

        if not candidates:
            return None

        # Choose the VCS in the inner-most directory. Since all repository
        # roots found here would be either `location` or one of its
        # parents, the longest path should have the most path components,
        # i.e. the backend representing the inner-most repository.
        return candidates[max(candidates, key=len)]

    def get_backend_for_scheme(self, scheme: str) -> Optional["VersionControl"]:
        """
        Return a VersionControl object or None.
        """
        return next(
            (b for b in self._registry.values() if scheme in b.schemes),
            None,
        )

    def get_backend(self, name: str) -> Optional["VersionControl"]:
        """
        Return a VersionControl object or None.
        """
        return self._registry.get(name.lower())
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
vcs = VcsSupport()
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
class VersionControl:
    """Base class for version control system backends.

    Subclasses override the class attributes below and the methods that
    raise NotImplementedError, and are registered with the module-level
    ``vcs`` registry.
    """

    # Backend name, e.g. "git"; used as the vcs URL prefix and the command
    # name passed to run_command().
    name = ""
    # Metadata directory that marks a checkout of this VCS (set by subclasses).
    dirname = ""
    # Noun used in log messages (set by subclasses).
    repo_name = ""
    # List of supported schemes for this Version Control
    schemes: Tuple[str, ...] = ()
    # Iterable of environment variable names to pass to call_subprocess().
    unset_environ: Tuple[str, ...] = ()
    # Revision used when none is specified; None means "no revision argument".
    default_arg_rev: Optional[str] = None

    @classmethod
    def should_add_vcs_url_prefix(cls, remote_url: str) -> bool:
        """
        Return whether the vcs prefix (e.g. "git+") should be added to a
        repository's remote url when used in a requirement.
        """
        return not remote_url.lower().startswith(f"{cls.name}:")

    @classmethod
    def get_subdirectory(cls, location: str) -> Optional[str]:
        """
        Return the path to Python project root, relative to the repo root.
        Return None if the project root is in the repo root.
        """
        return None

    @classmethod
    def get_requirement_revision(cls, repo_dir: str) -> str:
        """
        Return the revision string that should be used in a requirement.
        """
        return cls.get_revision(repo_dir)

    @classmethod
    def get_src_requirement(cls, repo_dir: str, project_name: str) -> str:
        """
        Return the requirement string to use to redownload the files
        currently at the given repository directory.

        Args:
            project_name: the (unescaped) project name.

        The return value has a form similar to the following:

            {repository_url}@{revision}#egg={project_name}
        """
        repo_url = cls.get_remote_url(repo_dir)

        if cls.should_add_vcs_url_prefix(repo_url):
            repo_url = f"{cls.name}+{repo_url}"

        revision = cls.get_requirement_revision(repo_dir)
        subdir = cls.get_subdirectory(repo_dir)
        req = make_vcs_requirement_url(repo_url, revision, project_name, subdir=subdir)

        return req

    @staticmethod
    def get_base_rev_args(rev: str) -> List[str]:
        """
        Return the base revision arguments for a vcs command.

        Args:
            rev: the name of a revision to install. Cannot be None.
        """
        raise NotImplementedError

    def is_immutable_rev_checkout(self, url: str, dest: str) -> bool:
        """
        Return true if the commit hash checked out at dest matches
        the revision in url.

        Always return False, if the VCS does not support immutable commit
        hashes.

        This method does not check if there are local uncommitted changes
        in dest after checkout, as pip currently has no use case for that.
        """
        return False

    @classmethod
    def make_rev_options(
        cls, rev: Optional[str] = None, extra_args: Optional[CommandArgs] = None
    ) -> RevOptions:
        """
        Return a RevOptions object.

        Args:
            rev: the name of a revision to install.
            extra_args: a list of extra options.
        """
        return RevOptions(cls, rev, extra_args=extra_args or [])

    @classmethod
    def _is_local_repository(cls, repo: str) -> bool:
        """
        posix absolute paths start with os.path.sep,
        win32 ones start with drive (like c:\\folder)
        """
        drive, tail = os.path.splitdrive(repo)
        return repo.startswith(os.path.sep) or bool(drive)

    @classmethod
    def get_netloc_and_auth(
        cls, netloc: str, scheme: str
    ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]:
        """
        Parse the repository URL's netloc, and return the new netloc to use
        along with auth information.

        Args:
            netloc: the original repository URL netloc.
            scheme: the repository URL's scheme without the vcs prefix.

        This is mainly for the Subversion class to override, so that auth
        information can be provided via the --username and --password options
        instead of through the URL. For other subclasses like Git without
        such an option, auth information must stay in the URL.

        Returns: (netloc, (username, password)).
        """
        return netloc, (None, None)

    @classmethod
    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
        """
        Parse the repository URL to use, and return the URL, revision,
        and auth info to use.

        Returns: (url, rev, (username, password)).

        Raises:
            ValueError: if the URL lacks the "<vcs>+" prefix.
            InstallationError: if "@" is present but the revision is empty.
        """
        scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
        if "+" not in scheme:
            raise ValueError(
                f"Sorry, {url!r} is a malformed VCS url. "
                "The format is <vcs>+<protocol>://<url>, "
                "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
            )
        # Remove the vcs prefix.
        scheme = scheme.split("+", 1)[1]
        netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme)
        rev = None
        if "@" in path:
            # rsplit so that "@" characters earlier in the path (e.g. in
            # ssh user@host forms embedded in the path) are preserved.
            path, rev = path.rsplit("@", 1)
            if not rev:
                raise InstallationError(
                    f"The URL {url!r} has an empty revision (after @) "
                    "which is not supported. Include a revision after @ "
                    "or remove @ from the URL."
                )
        url = urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
        return url, rev, user_pass

    @staticmethod
    def make_rev_args(
        username: Optional[str], password: Optional[HiddenText]
    ) -> CommandArgs:
        """
        Return the RevOptions "extra arguments" to use in obtain().
        """
        return []

    def get_url_rev_options(self, url: HiddenText) -> Tuple[HiddenText, RevOptions]:
        """
        Return the URL and RevOptions object to use in obtain(),
        as a tuple (url, rev_options).
        """
        secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret)
        username, secret_password = user_pass
        password: Optional[HiddenText] = None
        if secret_password is not None:
            # Re-wrap the password so it is not leaked in logs.
            password = hide_value(secret_password)
        extra_args = self.make_rev_args(username, password)
        rev_options = self.make_rev_options(rev, extra_args=extra_args)

        return hide_url(secret_url), rev_options

    @staticmethod
    def normalize_url(url: str) -> str:
        """
        Normalize a URL for comparison by unquoting it and removing any
        trailing slash.
        """
        return urllib.parse.unquote(url).rstrip("/")

    @classmethod
    def compare_urls(cls, url1: str, url2: str) -> bool:
        """
        Compare two repo URLs for identity, ignoring incidental differences.
        """
        return cls.normalize_url(url1) == cls.normalize_url(url2)

    def fetch_new(
        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
    ) -> None:
        """
        Fetch a revision from a repository, in the case that this is the
        first fetch from the repository.

        Args:
            dest: the directory to fetch the repository to.
            rev_options: a RevOptions object.
            verbosity: verbosity level.
        """
        raise NotImplementedError

    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """
        Switch the repo at ``dest`` to point to ``URL``.

        Args:
            rev_options: a RevOptions object.
        """
        raise NotImplementedError

    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """
        Update an already-existing repo to the given ``rev_options``.

        Args:
            rev_options: a RevOptions object.
        """
        raise NotImplementedError

    @classmethod
    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
        """
        Return whether the id of the current commit equals the given name.

        Args:
            dest: the repository directory.
            name: a string name.
        """
        raise NotImplementedError

    def obtain(self, dest: str, url: HiddenText, verbosity: int) -> None:
        """
        Install or update in editable mode the package represented by this
        VersionControl object.

        :param dest: the repository directory in which to install or update.
        :param url: the repository URL starting with a vcs prefix.
        :param verbosity: verbosity level.
        """
        url, rev_options = self.get_url_rev_options(url)

        # Fresh destination: simple clone/checkout.
        if not os.path.exists(dest):
            self.fetch_new(dest, url, rev_options, verbosity=verbosity)
            return

        rev_display = rev_options.to_display()
        if self.is_repository_directory(dest):
            existing_url = self.get_remote_url(dest)
            if self.compare_urls(existing_url, url.secret):
                # Same repo already checked out: update in place if needed.
                logger.debug(
                    "%s in %s exists, and has correct URL (%s)",
                    self.repo_name.title(),
                    display_path(dest),
                    url,
                )
                if not self.is_commit_id_equal(dest, rev_options.rev):
                    logger.info(
                        "Updating %s %s%s",
                        display_path(dest),
                        self.repo_name,
                        rev_display,
                    )
                    self.update(dest, url, rev_options)
                else:
                    logger.info("Skipping because already up-to-date.")
                return

            # Same VCS, different remote URL: offer to switch as well.
            logger.warning(
                "%s %s in %s exists with URL %s",
                self.name,
                self.repo_name,
                display_path(dest),
                existing_url,
            )
            prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ", ("s", "i", "w", "b"))
        else:
            # Destination exists but is not a checkout of this VCS.
            logger.warning(
                "Directory %s already exists, and is not a %s %s.",
                dest,
                self.name,
                self.repo_name,
            )
            # https://github.com/python/mypy/issues/1174
            prompt = ("(i)gnore, (w)ipe, (b)ackup ", ("i", "w", "b"))  # type: ignore

        logger.warning(
            "The plan is to install the %s repository %s",
            self.name,
            url,
        )
        response = ask_path_exists(f"What to do? {prompt[0]}", prompt[1])

        if response == "a":
            sys.exit(-1)

        if response == "w":
            logger.warning("Deleting %s", display_path(dest))
            rmtree(dest)
            self.fetch_new(dest, url, rev_options, verbosity=verbosity)
            return

        if response == "b":
            dest_dir = backup_dir(dest)
            logger.warning("Backing up %s to %s", display_path(dest), dest_dir)
            shutil.move(dest, dest_dir)
            self.fetch_new(dest, url, rev_options, verbosity=verbosity)
            return

        # Do nothing if the response is "i".
        if response == "s":
            logger.info(
                "Switching %s %s to %s%s",
                self.repo_name,
                display_path(dest),
                url,
                rev_display,
            )
            self.switch(dest, url, rev_options)

    def unpack(self, location: str, url: HiddenText, verbosity: int) -> None:
        """
        Clean up current location and download the url repository
        (and vcs infos) into location

        :param url: the repository URL starting with a vcs prefix.
        :param verbosity: verbosity level.
        """
        if os.path.exists(location):
            rmtree(location)
        self.obtain(location, url=url, verbosity=verbosity)

    @classmethod
    def get_remote_url(cls, location: str) -> str:
        """
        Return the url used at location

        Raises RemoteNotFoundError if the repository does not have a remote
        url configured.
        """
        raise NotImplementedError

    @classmethod
    def get_revision(cls, location: str) -> str:
        """
        Return the current commit id of the files at the given location.
        """
        raise NotImplementedError

    @classmethod
    def run_command(
        cls,
        cmd: Union[List[str], CommandArgs],
        show_stdout: bool = True,
        cwd: Optional[str] = None,
        on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",
        extra_ok_returncodes: Optional[Iterable[int]] = None,
        command_desc: Optional[str] = None,
        extra_environ: Optional[Mapping[str, Any]] = None,
        spinner: Optional[SpinnerInterface] = None,
        log_failed_cmd: bool = True,
        stdout_only: bool = False,
    ) -> str:
        """
        Run a VCS subcommand
        This is simply a wrapper around call_subprocess that adds the VCS
        command name, and checks that the VCS is available

        Raises:
            BadCommand: if the VCS executable cannot be found or executed.
        """
        cmd = make_command(cls.name, *cmd)
        if command_desc is None:
            command_desc = format_command_args(cmd)
        try:
            return call_subprocess(
                cmd,
                show_stdout,
                cwd,
                on_returncode=on_returncode,
                extra_ok_returncodes=extra_ok_returncodes,
                command_desc=command_desc,
                extra_environ=extra_environ,
                unset_environ=cls.unset_environ,
                spinner=spinner,
                log_failed_cmd=log_failed_cmd,
                stdout_only=stdout_only,
            )
        except NotADirectoryError:
            raise BadCommand(f"Cannot find command {cls.name!r} - invalid PATH")
        except FileNotFoundError:
            # errno.ENOENT = no such file or directory
            # In other words, the VCS executable isn't available
            raise BadCommand(
                f"Cannot find command {cls.name!r} - do you have "
                f"{cls.name!r} installed and in your PATH?"
            )
        except PermissionError:
            # errno.EACCES = Permission denied
            # This error occurs, for instance, when the command is installed
            # only for another user. So, the current user don't have
            # permission to call the other user command.
            raise BadCommand(
                f"No permission to execute {cls.name!r} - install it "
                f"locally, globally (ask admin), or check your PATH. "
                f"See possible solutions at "
                f"https://pip.pypa.io/en/latest/reference/pip_freeze/"
                f"#fixing-permission-denied."
            )

    @classmethod
    def is_repository_directory(cls, path: str) -> bool:
        """
        Return whether a directory path is a repository directory.
        """
        logger.debug("Checking in %s for %s (%s)...", path, cls.dirname, cls.name)
        return os.path.exists(os.path.join(path, cls.dirname))

    @classmethod
    def get_repository_root(cls, location: str) -> Optional[str]:
        """
        Return the "root" (top-level) directory controlled by the vcs,
        or `None` if the directory is not in any.

        It is meant to be overridden to implement smarter detection
        mechanisms for specific vcs.

        This can do more than is_repository_directory() alone. For
        example, the Git override checks that Git is actually available.
        """
        if cls.is_repository_directory(location):
            return location
        return None
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/wheel_builder.py
ADDED
|
@@ -0,0 +1,354 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Orchestrator for building wheels from InstallRequirements.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
import os.path
|
| 6 |
+
import re
|
| 7 |
+
import shutil
|
| 8 |
+
from typing import Iterable, List, Optional, Tuple
|
| 9 |
+
|
| 10 |
+
from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version
|
| 11 |
+
from pip._vendor.packaging.version import InvalidVersion, Version
|
| 12 |
+
|
| 13 |
+
from pip._internal.cache import WheelCache
|
| 14 |
+
from pip._internal.exceptions import InvalidWheelFilename, UnsupportedWheel
|
| 15 |
+
from pip._internal.metadata import FilesystemWheel, get_wheel_distribution
|
| 16 |
+
from pip._internal.models.link import Link
|
| 17 |
+
from pip._internal.models.wheel import Wheel
|
| 18 |
+
from pip._internal.operations.build.wheel import build_wheel_pep517
|
| 19 |
+
from pip._internal.operations.build.wheel_editable import build_wheel_editable
|
| 20 |
+
from pip._internal.operations.build.wheel_legacy import build_wheel_legacy
|
| 21 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 22 |
+
from pip._internal.utils.logging import indent_log
|
| 23 |
+
from pip._internal.utils.misc import ensure_dir, hash_file
|
| 24 |
+
from pip._internal.utils.setuptools_build import make_setuptools_clean_args
|
| 25 |
+
from pip._internal.utils.subprocess import call_subprocess
|
| 26 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 27 |
+
from pip._internal.utils.urls import path_to_url
|
| 28 |
+
from pip._internal.vcs import vcs
|
| 29 |
+
|
| 30 |
+
logger = logging.getLogger(__name__)
|
| 31 |
+
|
| 32 |
+
# Matches "<name>-<version>" egg-info style fragments, e.g. "foo-2.1"
# (case-insensitive).
_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE)

# (requirements built successfully, requirements that failed to build)
BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]]
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def _contains_egg_info(s: str) -> bool:
    """Determine whether the string looks like an egg_info.

    :param s: The string to parse. E.g. foo-2.1
    """
    return _egg_info_re.search(s) is not None
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def _should_build(req: InstallRequirement, need_wheel: bool) -> bool:
    """Return whether an InstallRequirement should be built into a wheel.

    :param req: the requirement under consideration.
    :param need_wheel: True for `pip wheel`, False for `pip install`.
    """
    # Constraints are never built.
    if req.constraint:
        return False

    # Already a wheel: nothing to build. Mention it for `pip wheel`,
    # where the user explicitly asked for wheels.
    if req.is_wheel:
        if need_wheel:
            logger.info("Skipping %s, due to already being wheel.", req.name)
        return False

    # `pip wheel` builds everything that is not already a wheel.
    if need_wheel:
        return True

    # From this point, this concerns the pip install command only
    # (need_wheel=False).
    if not req.source_dir:
        return False

    # we only build PEP 660 editable requirements
    if req.editable:
        return req.supports_pyproject_editable

    return True
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def should_build_for_wheel_command(req: InstallRequirement) -> bool:
    """Return whether `pip wheel` should build a wheel for this requirement."""
    return _should_build(req, need_wheel=True)
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def should_build_for_install_command(req: InstallRequirement) -> bool:
    """Return whether `pip install` should build a wheel for this requirement."""
    return _should_build(req, need_wheel=False)
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def _should_cache(
    req: InstallRequirement,
) -> Optional[bool]:
    """
    Report whether the wheel built for *req* may be stored in the persistent
    wheel cache, assuming the cache is available and _should_build() has
    already decided a wheel must be built.
    """
    # Editable requirements, and requirements without a source tree,
    # are never cached.
    if req.editable or not req.source_dir:
        return False

    link = req.link
    if link and link.is_vcs:
        # A VCS checkout is only cacheable when it is pinned to an
        # immutable revision (e.g. a full commit hash).
        assert not req.editable
        assert req.source_dir
        backend = vcs.get_backend_for_scheme(link.scheme)
        assert backend
        return bool(backend.is_immutable_rev_checkout(link.url, req.source_dir))

    assert link
    stem, _ext = link.splitext()
    # Cache only links whose file name embeds "<name>-<version>"; anything
    # else cannot be reliably keyed and is not cached.
    return _contains_egg_info(stem)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def _get_cache_dir(
|
| 123 |
+
req: InstallRequirement,
|
| 124 |
+
wheel_cache: WheelCache,
|
| 125 |
+
) -> str:
|
| 126 |
+
"""Return the persistent or temporary cache directory where the built
|
| 127 |
+
wheel need to be stored.
|
| 128 |
+
"""
|
| 129 |
+
cache_available = bool(wheel_cache.cache_dir)
|
| 130 |
+
assert req.link
|
| 131 |
+
if cache_available and _should_cache(req):
|
| 132 |
+
cache_dir = wheel_cache.get_path_for_link(req.link)
|
| 133 |
+
else:
|
| 134 |
+
cache_dir = wheel_cache.get_ephem_path_for_link(req.link)
|
| 135 |
+
return cache_dir
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
def _verify_one(req: InstallRequirement, wheel_path: str) -> None:
    """Sanity-check a freshly built wheel against its requirement.

    :raises InvalidWheelFilename: when the wheel file name does not carry the
        requirement's (canonicalized) name, or its version disagrees with the
        version recorded in the wheel metadata.
    :raises UnsupportedWheel: when Metadata-Version is missing or unparsable,
        or when Metadata >= 1.2 is declared but the version is not PEP 440.
    """
    canonical_name = canonicalize_name(req.name or "")
    w = Wheel(os.path.basename(wheel_path))
    # The distribution name encoded in the file name must match the
    # requirement being built.
    if canonicalize_name(w.name) != canonical_name:
        raise InvalidWheelFilename(
            f"Wheel has unexpected file name: expected {canonical_name!r}, "
            f"got {w.name!r}",
        )
    dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name)
    dist_verstr = str(dist.version)
    # The version in the file name must agree with the metadata version,
    # compared in canonicalized form (so "1.0" == "1.0.0" style differences
    # do not trip the check).
    if canonicalize_version(dist_verstr) != canonicalize_version(w.version):
        raise InvalidWheelFilename(
            f"Wheel has unexpected file name: expected {dist_verstr!r}, "
            f"got {w.version!r}",
        )
    metadata_version_value = dist.metadata_version
    if metadata_version_value is None:
        raise UnsupportedWheel("Missing Metadata-Version")
    try:
        metadata_version = Version(metadata_version_value)
    except InvalidVersion:
        msg = f"Invalid Metadata-Version: {metadata_version_value}"
        raise UnsupportedWheel(msg)
    # Metadata 1.2+ mandates PEP 440 versions; a dist.version that is not a
    # Version instance means it failed to parse as PEP 440.
    if metadata_version >= Version("1.2") and not isinstance(dist.version, Version):
        raise UnsupportedWheel(
            f"Metadata 1.2 mandates PEP 440 version, but {dist_verstr!r} is not"
        )
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def _build_one(
    req: InstallRequirement,
    output_dir: str,
    verify: bool,
    build_options: List[str],
    global_options: List[str],
    editable: bool,
) -> Optional[str]:
    """Build one wheel.

    :return: The filename of the built wheel, or None if the build failed.
    """
    artifact = "editable" if editable else "wheel"

    # Make sure the destination exists before doing any work.
    try:
        ensure_dir(output_dir)
    except OSError as e:
        logger.warning(
            "Building %s for %s failed: %s",
            artifact,
            req.name,
            e,
        )
        return None

    # Install build deps into temporary directory (PEP 518)
    with req.build_env:
        wheel_path = _build_one_inside_env(
            req, output_dir, build_options, global_options, editable
        )

    if not wheel_path:
        return None
    if verify:
        try:
            _verify_one(req, wheel_path)
        except (InvalidWheelFilename, UnsupportedWheel) as e:
            logger.warning("Built %s for %s is invalid: %s", artifact, req.name, e)
            return None
    return wheel_path
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
def _build_one_inside_env(
    req: InstallRequirement,
    output_dir: str,
    build_options: List[str],
    global_options: List[str],
    editable: bool,
) -> Optional[str]:
    """Run the actual wheel build (inside an already-activated build env).

    Builds into a temporary directory, then moves the result into
    *output_dir*. Returns the destination path, or None on failure.
    """
    with TempDirectory(kind="wheel") as temp_dir:
        assert req.name
        if req.use_pep517:
            # PEP 517 path: the backend was already used to prepare metadata.
            assert req.metadata_directory
            assert req.pep517_backend
            # setup.py-era options have no meaning for PEP 517 backends.
            if global_options:
                logger.warning(
                    "Ignoring --global-option when building %s using PEP 517", req.name
                )
            if build_options:
                logger.warning(
                    "Ignoring --build-option when building %s using PEP 517", req.name
                )
            if editable:
                # PEP 660 editable wheel.
                wheel_path = build_wheel_editable(
                    name=req.name,
                    backend=req.pep517_backend,
                    metadata_directory=req.metadata_directory,
                    tempd=temp_dir.path,
                )
            else:
                wheel_path = build_wheel_pep517(
                    name=req.name,
                    backend=req.pep517_backend,
                    metadata_directory=req.metadata_directory,
                    tempd=temp_dir.path,
                )
        else:
            # Legacy path: invoke setup.py bdist_wheel directly.
            wheel_path = build_wheel_legacy(
                name=req.name,
                setup_py_path=req.setup_py_path,
                source_dir=req.unpacked_source_directory,
                global_options=global_options,
                build_options=build_options,
                tempd=temp_dir.path,
            )

        if wheel_path is not None:
            wheel_name = os.path.basename(wheel_path)
            dest_path = os.path.join(output_dir, wheel_name)
            try:
                # Hash before moving so we can log size and digest.
                wheel_hash, length = hash_file(wheel_path)
                shutil.move(wheel_path, dest_path)
                logger.info(
                    "Created wheel for %s: filename=%s size=%d sha256=%s",
                    req.name,
                    wheel_name,
                    length,
                    wheel_hash.hexdigest(),
                )
                logger.info("Stored in directory: %s", output_dir)
                return dest_path
            except Exception as e:
                logger.warning(
                    "Building wheel for %s failed: %s",
                    req.name,
                    e,
                )
        # Ignore return, we can't do anything else useful.
        if not req.use_pep517:
            _clean_one_legacy(req, global_options)
        return None
|
| 274 |
+
|
| 275 |
+
|
| 276 |
+
def _clean_one_legacy(req: InstallRequirement, global_options: List[str]) -> bool:
    """Run ``setup.py clean`` for a legacy (non-PEP 517) requirement.

    :return: True when the clean subprocess completed successfully.
    """
    args = make_setuptools_clean_args(
        req.setup_py_path,
        global_options=global_options,
    )

    logger.info("Running setup.py clean for %s", req.name)
    try:
        call_subprocess(
            args, command_desc="python setup.py clean", cwd=req.source_dir
        )
    except Exception:
        # Best effort only: a failed clean leaves stale build artifacts
        # behind but must not abort the overall operation.
        logger.error("Failed cleaning build dir for %s", req.name)
        return False
    return True
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
def build(
    requirements: Iterable[InstallRequirement],
    wheel_cache: WheelCache,
    verify: bool,
    build_options: List[str],
    global_options: List[str],
) -> BuildResult:
    """Build wheels.

    :param verify: when True, validate each built wheel with _verify_one().
    :return: The list of InstallRequirement that succeeded to build and
        the list of InstallRequirement that failed to build.
    """
    if not requirements:
        return [], []

    # Build the wheels.
    logger.info(
        "Building wheels for collected packages: %s",
        ", ".join(req.name for req in requirements),  # type: ignore
    )

    with indent_log():
        build_successes, build_failures = [], []
        for req in requirements:
            assert req.name
            # Either the persistent wheel cache dir or an ephemeral one.
            cache_dir = _get_cache_dir(req, wheel_cache)
            wheel_file = _build_one(
                req,
                cache_dir,
                verify,
                build_options,
                global_options,
                req.editable and req.permit_editable_wheels,
            )
            if wheel_file:
                # Record the download origin in the cache
                if req.download_info is not None:
                    # download_info is guaranteed to be set because when we build an
                    # InstallRequirement it has been through the preparer before, but
                    # let's be cautious.
                    wheel_cache.record_download_origin(cache_dir, req.download_info)
                # Update the link for this, so that the install step proceeds
                # from the freshly built wheel instead of the original source.
                req.link = Link(path_to_url(wheel_file))
                req.local_file_path = req.link.file_path
                assert req.link.is_wheel
                build_successes.append(req)
            else:
                build_failures.append(req)

    # notify success/failure
    if build_successes:
        logger.info(
            "Successfully built %s",
            " ".join([req.name for req in build_successes]),  # type: ignore
        )
    if build_failures:
        logger.info(
            "Failed to build %s",
            " ".join([req.name for req in build_failures]),  # type: ignore
        )
    # Return a list of requirements that failed to build
    return build_successes, build_failures
|
evalkit_llava/lib/python3.10/site-packages/pip/py.typed
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
pip is a command line program. While it is implemented in Python, and so is
|
| 2 |
+
available for import, you must not use pip's internal APIs in this way. Typing
|
| 3 |
+
information is provided as a convenience only and is not a guarantee. Expect
|
| 4 |
+
unannounced changes to the API and types in releases.
|
evalkit_llava/lib/python3.10/site-packages/setuptools-75.8.0-py3.10.egg-info/entry_points.txt
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[distutils.commands]
|
| 2 |
+
alias = setuptools.command.alias:alias
|
| 3 |
+
bdist_egg = setuptools.command.bdist_egg:bdist_egg
|
| 4 |
+
bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
|
| 5 |
+
bdist_wheel = setuptools.command.bdist_wheel:bdist_wheel
|
| 6 |
+
build = setuptools.command.build:build
|
| 7 |
+
build_clib = setuptools.command.build_clib:build_clib
|
| 8 |
+
build_ext = setuptools.command.build_ext:build_ext
|
| 9 |
+
build_py = setuptools.command.build_py:build_py
|
| 10 |
+
develop = setuptools.command.develop:develop
|
| 11 |
+
dist_info = setuptools.command.dist_info:dist_info
|
| 12 |
+
easy_install = setuptools.command.easy_install:easy_install
|
| 13 |
+
editable_wheel = setuptools.command.editable_wheel:editable_wheel
|
| 14 |
+
egg_info = setuptools.command.egg_info:egg_info
|
| 15 |
+
install = setuptools.command.install:install
|
| 16 |
+
install_egg_info = setuptools.command.install_egg_info:install_egg_info
|
| 17 |
+
install_lib = setuptools.command.install_lib:install_lib
|
| 18 |
+
install_scripts = setuptools.command.install_scripts:install_scripts
|
| 19 |
+
rotate = setuptools.command.rotate:rotate
|
| 20 |
+
saveopts = setuptools.command.saveopts:saveopts
|
| 21 |
+
sdist = setuptools.command.sdist:sdist
|
| 22 |
+
setopt = setuptools.command.setopt:setopt
|
| 23 |
+
|
| 24 |
+
[distutils.setup_keywords]
|
| 25 |
+
dependency_links = setuptools.dist:assert_string_list
|
| 26 |
+
eager_resources = setuptools.dist:assert_string_list
|
| 27 |
+
entry_points = setuptools.dist:check_entry_points
|
| 28 |
+
exclude_package_data = setuptools.dist:check_package_data
|
| 29 |
+
extras_require = setuptools.dist:check_extras
|
| 30 |
+
include_package_data = setuptools.dist:assert_bool
|
| 31 |
+
install_requires = setuptools.dist:check_requirements
|
| 32 |
+
namespace_packages = setuptools.dist:check_nsp
|
| 33 |
+
package_data = setuptools.dist:check_package_data
|
| 34 |
+
packages = setuptools.dist:check_packages
|
| 35 |
+
python_requires = setuptools.dist:check_specifier
|
| 36 |
+
setup_requires = setuptools.dist:check_requirements
|
| 37 |
+
use_2to3 = setuptools.dist:invalid_unless_false
|
| 38 |
+
zip_safe = setuptools.dist:assert_bool
|
| 39 |
+
|
| 40 |
+
[egg_info.writers]
|
| 41 |
+
PKG-INFO = setuptools.command.egg_info:write_pkg_info
|
| 42 |
+
dependency_links.txt = setuptools.command.egg_info:overwrite_arg
|
| 43 |
+
eager_resources.txt = setuptools.command.egg_info:overwrite_arg
|
| 44 |
+
entry_points.txt = setuptools.command.egg_info:write_entries
|
| 45 |
+
namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
|
| 46 |
+
requires.txt = setuptools.command.egg_info:write_requirements
|
| 47 |
+
top_level.txt = setuptools.command.egg_info:write_toplevel_names
|
| 48 |
+
|
| 49 |
+
[setuptools.finalize_distribution_options]
|
| 50 |
+
keywords = setuptools.dist:Distribution._finalize_setup_keywords
|
| 51 |
+
parent_finalize = setuptools.dist:_Distribution.finalize_options
|
evalkit_llava/lib/python3.10/site-packages/wheel-0.45.1.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: flit 3.9.0
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
evalkit_llava/lib/python3.10/site-packages/wheel/__main__.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Wheel command line tool (enable python -m wheel syntax)
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from __future__ import annotations
|
| 6 |
+
|
| 7 |
+
import sys
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def main():  # needed for console script
    if __package__ == "":
        # To be able to run 'python wheel-0.9.whl/wheel': put the archive
        # root on sys.path so 'wheel' resolves to the packaged module.
        import os.path

        path = os.path.dirname(os.path.dirname(__file__))
        sys.path.insert(0, path)
    import wheel.cli

    sys.exit(wheel.cli.main())
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
# Support "python -m wheel" by exiting with the CLI's status code.
if __name__ == "__main__":
    sys.exit(main())
|
evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (212 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/__main__.cpython-310.pyc
ADDED
|
Binary file (610 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/_bdist_wheel.cpython-310.pyc
ADDED
|
Binary file (15.2 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/_setuptools_logging.cpython-310.pyc
ADDED
|
Binary file (998 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/bdist_wheel.cpython-310.pyc
ADDED
|
Binary file (672 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/macosx_libfile.cpython-310.pyc
ADDED
|
Binary file (10.4 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/metadata.cpython-310.pyc
ADDED
|
Binary file (6.17 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/util.cpython-310.pyc
ADDED
|
Binary file (693 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/wheel/__pycache__/wheelfile.cpython-310.pyc
ADDED
|
Binary file (6.48 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/wheel/cli/__init__.py
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Wheel command-line utility.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from __future__ import annotations
|
| 6 |
+
|
| 7 |
+
import argparse
|
| 8 |
+
import os
|
| 9 |
+
import sys
|
| 10 |
+
from argparse import ArgumentTypeError
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class WheelError(Exception):
    """Base exception for errors raised by the wheel command-line tools."""

    pass
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def unpack_f(args: argparse.Namespace) -> None:
    """Dispatch the ``unpack`` subcommand (import deferred for startup speed)."""
    from .unpack import unpack

    unpack(args.wheelfile, args.dest)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def pack_f(args: argparse.Namespace) -> None:
    """Dispatch the ``pack`` subcommand (import deferred for startup speed)."""
    from .pack import pack

    pack(args.directory, args.dest_dir, args.build_number)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def convert_f(args: argparse.Namespace) -> None:
    """Dispatch the ``convert`` subcommand (import deferred for startup speed)."""
    from .convert import convert

    convert(args.files, args.dest_dir, args.verbose)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def tags_f(args: argparse.Namespace) -> None:
    """Dispatch the ``tags`` subcommand: retag each given wheel and print
    the resulting file name(s).
    """
    from .tags import tags

    # Generator (not a list) on purpose: each wheel is retagged lazily,
    # right before its new name is printed.
    names = (
        tags(
            wheel,
            args.python_tag,
            args.abi_tag,
            args.platform_tag,
            args.build,
            args.remove,
        )
        for wheel in args.wheel
    )

    for name in names:
        print(name)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def version_f(args: argparse.Namespace) -> None:
    """Dispatch the ``version`` subcommand: print the wheel package version."""
    from .. import __version__

    print(f"wheel {__version__}")
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def parse_build_tag(build_tag: str) -> str:
    """Validate a build tag for argparse and return it unchanged.

    An empty tag is allowed; a non-empty tag must start with a digit and
    must not contain ``-``.
    """
    if build_tag:
        if not build_tag[0].isdigit():
            raise ArgumentTypeError("build tag must begin with a digit")
        if "-" in build_tag:
            raise ArgumentTypeError("invalid character ('-') in build tag")
    return build_tag
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
TAGS_HELP = """\
|
| 70 |
+
Make a new wheel with given tags. Any tags unspecified will remain the same.
|
| 71 |
+
Starting the tags with a "+" will append to the existing tags. Starting with a
|
| 72 |
+
"-" will remove a tag (use --option=-TAG syntax). Multiple tags can be
|
| 73 |
+
separated by ".". The original file will remain unless --remove is given. The
|
| 74 |
+
output filename(s) will be displayed on stdout for further processing.
|
| 75 |
+
"""
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def parser() -> argparse.ArgumentParser:
    """Build the argument parser for the wheel CLI.

    Each subcommand registers its handler via ``set_defaults(func=...)``;
    main() dispatches on the parsed ``func`` attribute.
    """
    p = argparse.ArgumentParser()
    s = p.add_subparsers(help="commands")

    unpack_parser = s.add_parser("unpack", help="Unpack wheel")
    unpack_parser.add_argument(
        "--dest", "-d", help="Destination directory", default="."
    )
    unpack_parser.add_argument("wheelfile", help="Wheel file")
    unpack_parser.set_defaults(func=unpack_f)

    repack_parser = s.add_parser("pack", help="Repack wheel")
    repack_parser.add_argument("directory", help="Root directory of the unpacked wheel")
    repack_parser.add_argument(
        "--dest-dir",
        "-d",
        default=os.path.curdir,
        help="Directory to store the wheel (default %(default)s)",
    )
    repack_parser.add_argument(
        "--build-number", help="Build tag to use in the wheel name"
    )
    repack_parser.set_defaults(func=pack_f)

    convert_parser = s.add_parser("convert", help="Convert egg or wininst to wheel")
    convert_parser.add_argument("files", nargs="*", help="Files to convert")
    convert_parser.add_argument(
        "--dest-dir",
        "-d",
        default=os.path.curdir,
        help="Directory to store wheels (default %(default)s)",
    )
    convert_parser.add_argument("--verbose", "-v", action="store_true")
    convert_parser.set_defaults(func=convert_f)

    tags_parser = s.add_parser(
        "tags", help="Add or replace the tags on a wheel", description=TAGS_HELP
    )
    tags_parser.add_argument("wheel", nargs="*", help="Existing wheel(s) to retag")
    tags_parser.add_argument(
        "--remove",
        action="store_true",
        help="Remove the original files, keeping only the renamed ones",
    )
    tags_parser.add_argument(
        "--python-tag", metavar="TAG", help="Specify an interpreter tag(s)"
    )
    tags_parser.add_argument("--abi-tag", metavar="TAG", help="Specify an ABI tag(s)")
    tags_parser.add_argument(
        "--platform-tag", metavar="TAG", help="Specify a platform tag(s)"
    )
    tags_parser.add_argument(
        "--build", type=parse_build_tag, metavar="BUILD", help="Specify a build tag"
    )
    tags_parser.set_defaults(func=tags_f)

    version_parser = s.add_parser("version", help="Print version and exit")
    version_parser.set_defaults(func=version_f)

    help_parser = s.add_parser("help", help="Show this help")
    help_parser.set_defaults(func=lambda args: p.print_help())

    return p
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
def main() -> int:
    """CLI entry point.

    :return: process exit status -- 0 on success, 1 when no subcommand was
        given (help is printed) or a WheelError occurred.
    """
    p = parser()
    args = p.parse_args()
    if not hasattr(args, "func"):
        # No subcommand given: show usage and fall through to exit code 1.
        p.print_help()
    else:
        try:
            args.func(args)
            return 0
        except WheelError as e:
            print(e, file=sys.stderr)

    return 1
|