Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +2 -0
- llava/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/locations/__init__.py +456 -0
- llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/base.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/locations/_distutils.py +172 -0
- llava/lib/python3.10/site-packages/pip/_internal/locations/_sysconfig.py +214 -0
- llava/lib/python3.10/site-packages/pip/_internal/locations/base.py +81 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/__init__.py +2 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/index.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/candidate.py +25 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/direct_url.py +224 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/index.py +28 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/installation_report.py +56 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/link.py +604 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/scheme.py +25 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/selection_prefs.py +53 -0
- llava/lib/python3.10/site-packages/pip/_internal/models/target_python.py +121 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/__init__.py +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/__init__.py +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/build_tracker.py +138 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py +39 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py +42 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py +74 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/wheel.py +37 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_editable.py +46 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py +102 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/check.py +181 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/freeze.py +256 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py +2 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc +0 -0
.gitattributes
CHANGED
|
@@ -1331,3 +1331,5 @@ videochat2/lib/python3.10/site-packages/scipy/stats/_boost/invgauss_ufunc.cpytho
|
|
| 1331 |
videochat2/lib/python3.10/site-packages/scipy/stats/_boost/binom_ufunc.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1332 |
videochat2/lib/python3.10/site-packages/matplotlib/__pycache__/widgets.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1333 |
minigpt2/lib/libcrypto.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
| 1331 |
videochat2/lib/python3.10/site-packages/scipy/stats/_boost/binom_ufunc.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1332 |
videochat2/lib/python3.10/site-packages/matplotlib/__pycache__/widgets.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1333 |
minigpt2/lib/libcrypto.so filter=lfs diff=lfs merge=lfs -text
|
| 1334 |
+
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/idna/__pycache__/uts46data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1335 |
+
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/yarl/_quoting_c.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
llava/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc
ADDED
|
Binary file (2.49 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/locations/__init__.py
ADDED
|
@@ -0,0 +1,456 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
import pathlib
|
| 5 |
+
import sys
|
| 6 |
+
import sysconfig
|
| 7 |
+
from typing import Any, Dict, Generator, Optional, Tuple
|
| 8 |
+
|
| 9 |
+
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
|
| 10 |
+
from pip._internal.utils.compat import WINDOWS
|
| 11 |
+
from pip._internal.utils.deprecation import deprecated
|
| 12 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
| 13 |
+
|
| 14 |
+
from . import _sysconfig
|
| 15 |
+
from .base import (
|
| 16 |
+
USER_CACHE_DIR,
|
| 17 |
+
get_major_minor_version,
|
| 18 |
+
get_src_prefix,
|
| 19 |
+
is_osx_framework,
|
| 20 |
+
site_packages,
|
| 21 |
+
user_site,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
__all__ = [
|
| 25 |
+
"USER_CACHE_DIR",
|
| 26 |
+
"get_bin_prefix",
|
| 27 |
+
"get_bin_user",
|
| 28 |
+
"get_major_minor_version",
|
| 29 |
+
"get_platlib",
|
| 30 |
+
"get_purelib",
|
| 31 |
+
"get_scheme",
|
| 32 |
+
"get_src_prefix",
|
| 33 |
+
"site_packages",
|
| 34 |
+
"user_site",
|
| 35 |
+
]
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
logger = logging.getLogger(__name__)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")
|
| 42 |
+
|
| 43 |
+
_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def _should_use_sysconfig() -> bool:
|
| 47 |
+
"""This function determines the value of _USE_SYSCONFIG.
|
| 48 |
+
|
| 49 |
+
By default, pip uses sysconfig on Python 3.10+.
|
| 50 |
+
But Python distributors can override this decision by setting:
|
| 51 |
+
sysconfig._PIP_USE_SYSCONFIG = True / False
|
| 52 |
+
Rationale in https://github.com/pypa/pip/issues/10647
|
| 53 |
+
|
| 54 |
+
This is a function for testability, but should be constant during any one
|
| 55 |
+
run.
|
| 56 |
+
"""
|
| 57 |
+
return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
_USE_SYSCONFIG = _should_use_sysconfig()
|
| 61 |
+
|
| 62 |
+
if not _USE_SYSCONFIG:
|
| 63 |
+
# Import distutils lazily to avoid deprecation warnings,
|
| 64 |
+
# but import it soon enough that it is in memory and available during
|
| 65 |
+
# a pip reinstall.
|
| 66 |
+
from . import _distutils
|
| 67 |
+
|
| 68 |
+
# Be noisy about incompatibilities if this platforms "should" be using
|
| 69 |
+
# sysconfig, but is explicitly opting out and using distutils instead.
|
| 70 |
+
if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
|
| 71 |
+
_MISMATCH_LEVEL = logging.WARNING
|
| 72 |
+
else:
|
| 73 |
+
_MISMATCH_LEVEL = logging.DEBUG
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def _looks_like_bpo_44860() -> bool:
|
| 77 |
+
"""The resolution to bpo-44860 will change this incorrect platlib.
|
| 78 |
+
|
| 79 |
+
See <https://bugs.python.org/issue44860>.
|
| 80 |
+
"""
|
| 81 |
+
from distutils.command.install import INSTALL_SCHEMES
|
| 82 |
+
|
| 83 |
+
try:
|
| 84 |
+
unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]
|
| 85 |
+
except KeyError:
|
| 86 |
+
return False
|
| 87 |
+
return unix_user_platlib == "$usersite"
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
|
| 91 |
+
platlib = scheme["platlib"]
|
| 92 |
+
if "/$platlibdir/" in platlib:
|
| 93 |
+
platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
|
| 94 |
+
if "/lib64/" not in platlib:
|
| 95 |
+
return False
|
| 96 |
+
unpatched = platlib.replace("/lib64/", "/lib/")
|
| 97 |
+
return unpatched.replace("$platbase/", "$base/") == scheme["purelib"]
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
@functools.lru_cache(maxsize=None)
|
| 101 |
+
def _looks_like_red_hat_lib() -> bool:
|
| 102 |
+
"""Red Hat patches platlib in unix_prefix and unix_home, but not purelib.
|
| 103 |
+
|
| 104 |
+
This is the only way I can see to tell a Red Hat-patched Python.
|
| 105 |
+
"""
|
| 106 |
+
from distutils.command.install import INSTALL_SCHEMES
|
| 107 |
+
|
| 108 |
+
return all(
|
| 109 |
+
k in INSTALL_SCHEMES
|
| 110 |
+
and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k])
|
| 111 |
+
for k in ("unix_prefix", "unix_home")
|
| 112 |
+
)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
@functools.lru_cache(maxsize=None)
|
| 116 |
+
def _looks_like_debian_scheme() -> bool:
|
| 117 |
+
"""Debian adds two additional schemes."""
|
| 118 |
+
from distutils.command.install import INSTALL_SCHEMES
|
| 119 |
+
|
| 120 |
+
return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
@functools.lru_cache(maxsize=None)
|
| 124 |
+
def _looks_like_red_hat_scheme() -> bool:
|
| 125 |
+
"""Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.
|
| 126 |
+
|
| 127 |
+
Red Hat's ``00251-change-user-install-location.patch`` changes the install
|
| 128 |
+
command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
|
| 129 |
+
(fortunately?) done quite unconditionally, so we create a default command
|
| 130 |
+
object without any configuration to detect this.
|
| 131 |
+
"""
|
| 132 |
+
from distutils.command.install import install
|
| 133 |
+
from distutils.dist import Distribution
|
| 134 |
+
|
| 135 |
+
cmd: Any = install(Distribution())
|
| 136 |
+
cmd.finalize_options()
|
| 137 |
+
return (
|
| 138 |
+
cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local"
|
| 139 |
+
and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local"
|
| 140 |
+
)
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
@functools.lru_cache(maxsize=None)
|
| 144 |
+
def _looks_like_slackware_scheme() -> bool:
|
| 145 |
+
"""Slackware patches sysconfig but fails to patch distutils and site.
|
| 146 |
+
|
| 147 |
+
Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
|
| 148 |
+
path, but does not do the same to the site module.
|
| 149 |
+
"""
|
| 150 |
+
if user_site is None: # User-site not available.
|
| 151 |
+
return False
|
| 152 |
+
try:
|
| 153 |
+
paths = sysconfig.get_paths(scheme="posix_user", expand=False)
|
| 154 |
+
except KeyError: # User-site not available.
|
| 155 |
+
return False
|
| 156 |
+
return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
@functools.lru_cache(maxsize=None)
|
| 160 |
+
def _looks_like_msys2_mingw_scheme() -> bool:
|
| 161 |
+
"""MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
|
| 162 |
+
|
| 163 |
+
However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is
|
| 164 |
+
likely going to be included in their 3.10 release, so we ignore the warning.
|
| 165 |
+
See msys2/MINGW-packages#9319.
|
| 166 |
+
|
| 167 |
+
MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase,
|
| 168 |
+
and is missing the final ``"site-packages"``.
|
| 169 |
+
"""
|
| 170 |
+
paths = sysconfig.get_paths("nt", expand=False)
|
| 171 |
+
return all(
|
| 172 |
+
"Lib" not in p and "lib" in p and not p.endswith("site-packages")
|
| 173 |
+
for p in (paths[key] for key in ("platlib", "purelib"))
|
| 174 |
+
)
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]:
|
| 178 |
+
ldversion = sysconfig.get_config_var("LDVERSION")
|
| 179 |
+
abiflags = getattr(sys, "abiflags", None)
|
| 180 |
+
|
| 181 |
+
# LDVERSION does not end with sys.abiflags. Just return the path unchanged.
|
| 182 |
+
if not ldversion or not abiflags or not ldversion.endswith(abiflags):
|
| 183 |
+
yield from parts
|
| 184 |
+
return
|
| 185 |
+
|
| 186 |
+
# Strip sys.abiflags from LDVERSION-based path components.
|
| 187 |
+
for part in parts:
|
| 188 |
+
if part.endswith(ldversion):
|
| 189 |
+
part = part[: (0 - len(abiflags))]
|
| 190 |
+
yield part
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
@functools.lru_cache(maxsize=None)
|
| 194 |
+
def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
|
| 195 |
+
issue_url = "https://github.com/pypa/pip/issues/10151"
|
| 196 |
+
message = (
|
| 197 |
+
"Value for %s does not match. Please report this to <%s>"
|
| 198 |
+
"\ndistutils: %s"
|
| 199 |
+
"\nsysconfig: %s"
|
| 200 |
+
)
|
| 201 |
+
logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new)
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
|
| 205 |
+
if old == new:
|
| 206 |
+
return False
|
| 207 |
+
_warn_mismatched(old, new, key=key)
|
| 208 |
+
return True
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
@functools.lru_cache(maxsize=None)
|
| 212 |
+
def _log_context(
|
| 213 |
+
*,
|
| 214 |
+
user: bool = False,
|
| 215 |
+
home: Optional[str] = None,
|
| 216 |
+
root: Optional[str] = None,
|
| 217 |
+
prefix: Optional[str] = None,
|
| 218 |
+
) -> None:
|
| 219 |
+
parts = [
|
| 220 |
+
"Additional context:",
|
| 221 |
+
"user = %r",
|
| 222 |
+
"home = %r",
|
| 223 |
+
"root = %r",
|
| 224 |
+
"prefix = %r",
|
| 225 |
+
]
|
| 226 |
+
|
| 227 |
+
logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix)
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
def get_scheme(
|
| 231 |
+
dist_name: str,
|
| 232 |
+
user: bool = False,
|
| 233 |
+
home: Optional[str] = None,
|
| 234 |
+
root: Optional[str] = None,
|
| 235 |
+
isolated: bool = False,
|
| 236 |
+
prefix: Optional[str] = None,
|
| 237 |
+
) -> Scheme:
|
| 238 |
+
new = _sysconfig.get_scheme(
|
| 239 |
+
dist_name,
|
| 240 |
+
user=user,
|
| 241 |
+
home=home,
|
| 242 |
+
root=root,
|
| 243 |
+
isolated=isolated,
|
| 244 |
+
prefix=prefix,
|
| 245 |
+
)
|
| 246 |
+
if _USE_SYSCONFIG:
|
| 247 |
+
return new
|
| 248 |
+
|
| 249 |
+
old = _distutils.get_scheme(
|
| 250 |
+
dist_name,
|
| 251 |
+
user=user,
|
| 252 |
+
home=home,
|
| 253 |
+
root=root,
|
| 254 |
+
isolated=isolated,
|
| 255 |
+
prefix=prefix,
|
| 256 |
+
)
|
| 257 |
+
|
| 258 |
+
warning_contexts = []
|
| 259 |
+
for k in SCHEME_KEYS:
|
| 260 |
+
old_v = pathlib.Path(getattr(old, k))
|
| 261 |
+
new_v = pathlib.Path(getattr(new, k))
|
| 262 |
+
|
| 263 |
+
if old_v == new_v:
|
| 264 |
+
continue
|
| 265 |
+
|
| 266 |
+
# distutils incorrectly put PyPy packages under ``site-packages/python``
|
| 267 |
+
# in the ``posix_home`` scheme, but PyPy devs said they expect the
|
| 268 |
+
# directory name to be ``pypy`` instead. So we treat this as a bug fix
|
| 269 |
+
# and not warn about it. See bpo-43307 and python/cpython#24628.
|
| 270 |
+
skip_pypy_special_case = (
|
| 271 |
+
sys.implementation.name == "pypy"
|
| 272 |
+
and home is not None
|
| 273 |
+
and k in ("platlib", "purelib")
|
| 274 |
+
and old_v.parent == new_v.parent
|
| 275 |
+
and old_v.name.startswith("python")
|
| 276 |
+
and new_v.name.startswith("pypy")
|
| 277 |
+
)
|
| 278 |
+
if skip_pypy_special_case:
|
| 279 |
+
continue
|
| 280 |
+
|
| 281 |
+
# sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in
|
| 282 |
+
# the ``include`` value, but distutils's ``headers`` does. We'll let
|
| 283 |
+
# CPython decide whether this is a bug or feature. See bpo-43948.
|
| 284 |
+
skip_osx_framework_user_special_case = (
|
| 285 |
+
user
|
| 286 |
+
and is_osx_framework()
|
| 287 |
+
and k == "headers"
|
| 288 |
+
and old_v.parent.parent == new_v.parent
|
| 289 |
+
and old_v.parent.name.startswith("python")
|
| 290 |
+
)
|
| 291 |
+
if skip_osx_framework_user_special_case:
|
| 292 |
+
continue
|
| 293 |
+
|
| 294 |
+
# On Red Hat and derived Linux distributions, distutils is patched to
|
| 295 |
+
# use "lib64" instead of "lib" for platlib.
|
| 296 |
+
if k == "platlib" and _looks_like_red_hat_lib():
|
| 297 |
+
continue
|
| 298 |
+
|
| 299 |
+
# On Python 3.9+, sysconfig's posix_user scheme sets platlib against
|
| 300 |
+
# sys.platlibdir, but distutils's unix_user incorrectly coninutes
|
| 301 |
+
# using the same $usersite for both platlib and purelib. This creates a
|
| 302 |
+
# mismatch when sys.platlibdir is not "lib".
|
| 303 |
+
skip_bpo_44860 = (
|
| 304 |
+
user
|
| 305 |
+
and k == "platlib"
|
| 306 |
+
and not WINDOWS
|
| 307 |
+
and sys.version_info >= (3, 9)
|
| 308 |
+
and _PLATLIBDIR != "lib"
|
| 309 |
+
and _looks_like_bpo_44860()
|
| 310 |
+
)
|
| 311 |
+
if skip_bpo_44860:
|
| 312 |
+
continue
|
| 313 |
+
|
| 314 |
+
# Slackware incorrectly patches posix_user to use lib64 instead of lib,
|
| 315 |
+
# but not usersite to match the location.
|
| 316 |
+
skip_slackware_user_scheme = (
|
| 317 |
+
user
|
| 318 |
+
and k in ("platlib", "purelib")
|
| 319 |
+
and not WINDOWS
|
| 320 |
+
and _looks_like_slackware_scheme()
|
| 321 |
+
)
|
| 322 |
+
if skip_slackware_user_scheme:
|
| 323 |
+
continue
|
| 324 |
+
|
| 325 |
+
# Both Debian and Red Hat patch Python to place the system site under
|
| 326 |
+
# /usr/local instead of /usr. Debian also places lib in dist-packages
|
| 327 |
+
# instead of site-packages, but the /usr/local check should cover it.
|
| 328 |
+
skip_linux_system_special_case = (
|
| 329 |
+
not (user or home or prefix or running_under_virtualenv())
|
| 330 |
+
and old_v.parts[1:3] == ("usr", "local")
|
| 331 |
+
and len(new_v.parts) > 1
|
| 332 |
+
and new_v.parts[1] == "usr"
|
| 333 |
+
and (len(new_v.parts) < 3 or new_v.parts[2] != "local")
|
| 334 |
+
and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())
|
| 335 |
+
)
|
| 336 |
+
if skip_linux_system_special_case:
|
| 337 |
+
continue
|
| 338 |
+
|
| 339 |
+
# MSYS2 MINGW's sysconfig patch does not include the "site-packages"
|
| 340 |
+
# part of the path. This is incorrect and will be fixed in MSYS.
|
| 341 |
+
skip_msys2_mingw_bug = (
|
| 342 |
+
WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()
|
| 343 |
+
)
|
| 344 |
+
if skip_msys2_mingw_bug:
|
| 345 |
+
continue
|
| 346 |
+
|
| 347 |
+
# CPython's POSIX install script invokes pip (via ensurepip) against the
|
| 348 |
+
# interpreter located in the source tree, not the install site. This
|
| 349 |
+
# triggers special logic in sysconfig that's not present in distutils.
|
| 350 |
+
# https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
|
| 351 |
+
skip_cpython_build = (
|
| 352 |
+
sysconfig.is_python_build(check_home=True)
|
| 353 |
+
and not WINDOWS
|
| 354 |
+
and k in ("headers", "include", "platinclude")
|
| 355 |
+
)
|
| 356 |
+
if skip_cpython_build:
|
| 357 |
+
continue
|
| 358 |
+
|
| 359 |
+
warning_contexts.append((old_v, new_v, f"scheme.{k}"))
|
| 360 |
+
|
| 361 |
+
if not warning_contexts:
|
| 362 |
+
return old
|
| 363 |
+
|
| 364 |
+
# Check if this path mismatch is caused by distutils config files. Those
|
| 365 |
+
# files will no longer work once we switch to sysconfig, so this raises a
|
| 366 |
+
# deprecation message for them.
|
| 367 |
+
default_old = _distutils.distutils_scheme(
|
| 368 |
+
dist_name,
|
| 369 |
+
user,
|
| 370 |
+
home,
|
| 371 |
+
root,
|
| 372 |
+
isolated,
|
| 373 |
+
prefix,
|
| 374 |
+
ignore_config_files=True,
|
| 375 |
+
)
|
| 376 |
+
if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
|
| 377 |
+
deprecated(
|
| 378 |
+
reason=(
|
| 379 |
+
"Configuring installation scheme with distutils config files "
|
| 380 |
+
"is deprecated and will no longer work in the near future. If you "
|
| 381 |
+
"are using a Homebrew or Linuxbrew Python, please see discussion "
|
| 382 |
+
"at https://github.com/Homebrew/homebrew-core/issues/76621"
|
| 383 |
+
),
|
| 384 |
+
replacement=None,
|
| 385 |
+
gone_in=None,
|
| 386 |
+
)
|
| 387 |
+
return old
|
| 388 |
+
|
| 389 |
+
# Post warnings about this mismatch so user can report them back.
|
| 390 |
+
for old_v, new_v, key in warning_contexts:
|
| 391 |
+
_warn_mismatched(old_v, new_v, key=key)
|
| 392 |
+
_log_context(user=user, home=home, root=root, prefix=prefix)
|
| 393 |
+
|
| 394 |
+
return old
|
| 395 |
+
|
| 396 |
+
|
| 397 |
+
def get_bin_prefix() -> str:
|
| 398 |
+
new = _sysconfig.get_bin_prefix()
|
| 399 |
+
if _USE_SYSCONFIG:
|
| 400 |
+
return new
|
| 401 |
+
|
| 402 |
+
old = _distutils.get_bin_prefix()
|
| 403 |
+
if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):
|
| 404 |
+
_log_context()
|
| 405 |
+
return old
|
| 406 |
+
|
| 407 |
+
|
| 408 |
+
def get_bin_user() -> str:
|
| 409 |
+
return _sysconfig.get_scheme("", user=True).scripts
|
| 410 |
+
|
| 411 |
+
|
| 412 |
+
def _looks_like_deb_system_dist_packages(value: str) -> bool:
|
| 413 |
+
"""Check if the value is Debian's APT-controlled dist-packages.
|
| 414 |
+
|
| 415 |
+
Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the
|
| 416 |
+
default package path controlled by APT, but does not patch ``sysconfig`` to
|
| 417 |
+
do the same. This is similar to the bug worked around in ``get_scheme()``,
|
| 418 |
+
but here the default is ``deb_system`` instead of ``unix_local``. Ultimately
|
| 419 |
+
we can't do anything about this Debian bug, and this detection allows us to
|
| 420 |
+
skip the warning when needed.
|
| 421 |
+
"""
|
| 422 |
+
if not _looks_like_debian_scheme():
|
| 423 |
+
return False
|
| 424 |
+
if value == "/usr/lib/python3/dist-packages":
|
| 425 |
+
return True
|
| 426 |
+
return False
|
| 427 |
+
|
| 428 |
+
|
| 429 |
+
def get_purelib() -> str:
|
| 430 |
+
"""Return the default pure-Python lib location."""
|
| 431 |
+
new = _sysconfig.get_purelib()
|
| 432 |
+
if _USE_SYSCONFIG:
|
| 433 |
+
return new
|
| 434 |
+
|
| 435 |
+
old = _distutils.get_purelib()
|
| 436 |
+
if _looks_like_deb_system_dist_packages(old):
|
| 437 |
+
return old
|
| 438 |
+
if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):
|
| 439 |
+
_log_context()
|
| 440 |
+
return old
|
| 441 |
+
|
| 442 |
+
|
| 443 |
+
def get_platlib() -> str:
|
| 444 |
+
"""Return the default platform-shared lib location."""
|
| 445 |
+
new = _sysconfig.get_platlib()
|
| 446 |
+
if _USE_SYSCONFIG:
|
| 447 |
+
return new
|
| 448 |
+
|
| 449 |
+
from . import _distutils
|
| 450 |
+
|
| 451 |
+
old = _distutils.get_platlib()
|
| 452 |
+
if _looks_like_deb_system_dist_packages(old):
|
| 453 |
+
return old
|
| 454 |
+
if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):
|
| 455 |
+
_log_context()
|
| 456 |
+
return old
|
llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (10.9 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-310.pyc
ADDED
|
Binary file (4.54 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-310.pyc
ADDED
|
Binary file (5.99 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/base.cpython-310.pyc
ADDED
|
Binary file (2.38 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/locations/_distutils.py
ADDED
|
@@ -0,0 +1,172 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Locations where we look for configs, install stuff, etc"""
|
| 2 |
+
|
| 3 |
+
# The following comment should be removed at some point in the future.
|
| 4 |
+
# mypy: strict-optional=False
|
| 5 |
+
|
| 6 |
+
# If pip's going to use distutils, it should not be using the copy that setuptools
|
| 7 |
+
# might have injected into the environment. This is done by removing the injected
|
| 8 |
+
# shim, if it's injected.
|
| 9 |
+
#
|
| 10 |
+
# See https://github.com/pypa/pip/issues/8761 for the original discussion and
|
| 11 |
+
# rationale for why this is done within pip.
|
| 12 |
+
try:
|
| 13 |
+
__import__("_distutils_hack").remove_shim()
|
| 14 |
+
except (ImportError, AttributeError):
|
| 15 |
+
pass
|
| 16 |
+
|
| 17 |
+
import logging
|
| 18 |
+
import os
|
| 19 |
+
import sys
|
| 20 |
+
from distutils.cmd import Command as DistutilsCommand
|
| 21 |
+
from distutils.command.install import SCHEME_KEYS
|
| 22 |
+
from distutils.command.install import install as distutils_install_command
|
| 23 |
+
from distutils.sysconfig import get_python_lib
|
| 24 |
+
from typing import Dict, List, Optional, Union
|
| 25 |
+
|
| 26 |
+
from pip._internal.models.scheme import Scheme
|
| 27 |
+
from pip._internal.utils.compat import WINDOWS
|
| 28 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
| 29 |
+
|
| 30 |
+
from .base import get_major_minor_version
|
| 31 |
+
|
| 32 |
+
logger = logging.getLogger(__name__)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def distutils_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
    *,
    ignore_config_files: bool = False,
) -> Dict[str, str]:
    """
    Return a distutils install scheme

    Builds a distutils ``install`` command for *dist_name*, finalizes its
    options, and reads back the resolved ``install_*`` paths as a dict keyed
    by SCHEME_KEYS (purelib, platlib, headers, scripts, data).

    :param dist_name: distribution name, used in the virtualenv headers path
    :param user: use the per-user scheme
    :param home: base directory for the "home" scheme
    :param root: root under which the headers path is re-based
    :param isolated: pass --no-user-cfg so the user pydistutils.cfg is ignored
    :param prefix: base directory for the "prefix" scheme
    :param ignore_config_files: skip parsing distutils config files entirely
    """
    from distutils.dist import Distribution

    dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name}
    if isolated:
        # --no-user-cfg makes Distribution skip the user's pydistutils.cfg.
        dist_args["script_args"] = ["--no-user-cfg"]

    d = Distribution(dist_args)
    if not ignore_config_files:
        try:
            d.parse_config_files()
        except UnicodeDecodeError:
            # A config file with a bad encoding is skipped rather than fatal.
            paths = d.find_config_files()
            logger.warning(
                "Ignore distutils configs in %s due to encoding errors.",
                ", ".join(os.path.basename(p) for p in paths),
            )
    obj: Optional[DistutilsCommand] = None
    obj = d.get_command_obj("install", create=True)
    assert obj is not None
    i: distutils_install_command = obj
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), f"user={user} prefix={prefix}"
    assert not (home and prefix), f"home={home} prefix={prefix}"
    i.user = user or i.user
    if user or home:
        # Clear prefix so finalize_options() derives paths from user/home.
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()

    scheme: Dict[str, str] = {}
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, "install_" + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib). Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if "install_lib" in d.get_option_dict("install"):
        scheme.update({"purelib": i.install_lib, "platlib": i.install_lib})

    if running_under_virtualenv():
        # Pip historically uses a special headers location in virtualenvs:
        # <prefix>/include/site/pythonX.Y/<dist_name>
        if home:
            prefix = home
        elif user:
            prefix = i.install_userbase
        else:
            prefix = i.prefix
        scheme["headers"] = os.path.join(
            prefix,
            "include",
            "site",
            f"python{get_major_minor_version()}",
            dist_name,
        )

        if root is not None:
            # Re-base the headers path under root, dropping any drive letter.
            path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
            scheme["headers"] = os.path.join(root, path_no_drive[1:])

    return scheme
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> Scheme:
    """
    Get the "scheme" corresponding to the input parameters. The distutils
    documentation provides the context for the available schemes:
    https://docs.python.org/3/install/index.html#alternate-installation

    :param dist_name: the name of the package to retrieve the scheme for, used
        in the headers scheme path
    :param user: indicates to use the "user" scheme
    :param home: indicates to use the "home" scheme and provides the base
        directory for the same
    :param root: root under which other directories are re-based
    :param isolated: equivalent to --no-user-cfg, i.e. do not consider
        ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
        scheme paths
    :param prefix: indicates to use the "prefix" scheme and provides the
        base directory for the same
    """
    # Resolve the raw distutils paths, then repackage them as a Scheme.
    paths = distutils_scheme(dist_name, user, home, root, isolated, prefix)
    scheme_kwargs = {
        key: paths[key]
        for key in ("platlib", "purelib", "headers", "scripts", "data")
    }
    return Scheme(**scheme_kwargs)
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def get_bin_prefix() -> str:
    """Return the directory console scripts are installed into."""
    # XXX: In old virtualenv versions, sys.prefix can contain '..' components,
    # so we need to call normpath to eliminate them.
    prefix = os.path.normpath(sys.prefix)

    if WINDOWS:
        scripts_dir = os.path.join(prefix, "Scripts")
        if os.path.exists(scripts_dir):
            return scripts_dir
        # buildout uses 'bin' on Windows too?
        return os.path.join(prefix, "bin")

    # Forcing to use /usr/local/bin for standard macOS framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    on_system_framework = (
        sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/"
    )
    if on_system_framework:
        return "/usr/local/bin"
    return os.path.join(prefix, "bin")
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def get_purelib() -> str:
    """Return the platform-independent (pure Python) library directory."""
    # First positional argument of get_python_lib is plat_specific.
    return get_python_lib(False)
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
def get_platlib() -> str:
    """Return the platform-specific library directory."""
    # First positional argument of get_python_lib is plat_specific.
    return get_python_lib(True)
|
llava/lib/python3.10/site-packages/pip/_internal/locations/_sysconfig.py
ADDED
|
@@ -0,0 +1,214 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os
|
| 3 |
+
import sys
|
| 4 |
+
import sysconfig
|
| 5 |
+
import typing
|
| 6 |
+
|
| 7 |
+
from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid
|
| 8 |
+
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
|
| 9 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
| 10 |
+
|
| 11 |
+
from .base import change_root, get_major_minor_version, is_osx_framework
|
| 12 |
+
|
| 13 |
+
logger = logging.getLogger(__name__)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# Notes on _infer_* functions.
|
| 17 |
+
# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no
|
| 18 |
+
# way to ask things like "what is the '_prefix' scheme on this platform". These
|
| 19 |
+
# functions try to answer that with some heuristics while accounting for ad-hoc
|
| 20 |
+
# platforms not covered by CPython's default sysconfig implementation. If the
|
| 21 |
+
# ad-hoc implementation does not fully implement sysconfig, we'll fall back to
|
| 22 |
+
# a POSIX scheme.
|
| 23 |
+
|
| 24 |
+
# Every scheme name this interpreter's sysconfig knows about.
_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())

# sysconfig.get_preferred_scheme() only exists on Python 3.10+; when absent,
# the _infer_* heuristics below are used instead.
_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _should_use_osx_framework_prefix() -> bool:
    """Check for Apple's ``osx_framework_library`` scheme.

    Python distributed by Apple's Command Line Tools has this special scheme
    that's used when:

    * This is a framework build.
    * We are installing into the system prefix.

    This does not account for ``pip install --prefix`` (also means we're not
    installing to the system prefix), which should use ``posix_prefix``, but
    logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But
    since ``prefix`` is not available for ``sysconfig.get_default_scheme()``,
    which is the stdlib replacement for ``_infer_prefix()``, presumably Apple
    wouldn't be able to magically switch between ``osx_framework_library`` and
    ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library``
    means its behavior is consistent whether we use the stdlib implementation
    or our own, and we deal with this special case in ``get_scheme()`` instead.
    """
    # Guard-clause form of the original conjunction.
    if "osx_framework_library" not in _AVAILABLE_SCHEMES:
        return False
    if running_under_virtualenv():
        return False
    return is_osx_framework()
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def _infer_prefix() -> str:
    """Try to find a prefix scheme for the current platform.

    This tries:

    * A special ``osx_framework_library`` for Python distributed by Apple's
      Command Line Tools, when not running in a virtual environment.
    * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
    * Implementation without OS, used by PyPy on POSIX (``pypy``).
    * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
    * Just the OS name, used by CPython on Windows (``nt``).

    If none of the above works, fall back to ``posix_prefix``.
    """
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("prefix")
    if _should_use_osx_framework_prefix():
        return "osx_framework_library"
    # Candidate scheme names, in the priority order documented above.
    candidates = (
        f"{sys.implementation.name}_{os.name}",  # e.g. pypy_nt
        sys.implementation.name,  # e.g. pypy
        f"{os.name}_prefix",  # e.g. posix_prefix
        os.name,  # On Windows, prefix is just called "nt".
    )
    for candidate in candidates:
        if candidate in _AVAILABLE_SCHEMES:
            return candidate
    return "posix_prefix"
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def _infer_user() -> str:
    """Try to find a user scheme for the current platform."""
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("user")
    # Framework builds outside a virtualenv use Apple's special user scheme.
    use_framework = is_osx_framework() and not running_under_virtualenv()
    candidate = "osx_framework_user" if use_framework else f"{os.name}_user"
    if candidate in _AVAILABLE_SCHEMES:
        return candidate
    if "posix_user" not in _AVAILABLE_SCHEMES:  # User scheme unavailable.
        raise UserInstallationInvalid()
    return "posix_user"
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def _infer_home() -> str:
    """Try to find a home for the current platform."""
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("home")
    candidate = f"{os.name}_home"
    # Fall back to the POSIX home scheme when the OS-specific one is missing.
    return candidate if candidate in _AVAILABLE_SCHEMES else "posix_home"
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
# Update these keys if the user sets a custom home.
# These sysconfig variables are all overridden with the user-supplied
# home/prefix directory in get_scheme() below.
_HOME_KEYS = [
    "installed_base",
    "base",
    "installed_platbase",
    "platbase",
    "prefix",
    "exec_prefix",
]
# "userbase" is only defined on interpreters that support user installs.
if sysconfig.get_config_var("userbase") is not None:
    _HOME_KEYS.append("userbase")
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: typing.Optional[str] = None,
    root: typing.Optional[str] = None,
    isolated: bool = False,
    prefix: typing.Optional[str] = None,
) -> Scheme:
    """
    Get the "scheme" corresponding to the input parameters.

    :param dist_name: the name of the package to retrieve the scheme for, used
        in the headers scheme path
    :param user: indicates to use the "user" scheme
    :param home: indicates to use the "home" scheme
    :param root: root under which other directories are re-based
    :param isolated: ignored, but kept for distutils compatibility (where
        this controls whether the user-site pydistutils.cfg is honored)
    :param prefix: indicates to use the "prefix" scheme and provides the
        base directory for the same
    :raises InvalidSchemeCombination: if mutually exclusive options are given
    """
    # --user/--home are each mutually exclusive with --prefix.
    if user and prefix:
        raise InvalidSchemeCombination("--user", "--prefix")
    if home and prefix:
        raise InvalidSchemeCombination("--home", "--prefix")

    # Pick the sysconfig scheme name matching the requested install kind.
    if home is not None:
        scheme_name = _infer_home()
    elif user:
        scheme_name = _infer_user()
    else:
        scheme_name = _infer_prefix()

    # Special case: When installing into a custom prefix, use posix_prefix
    # instead of osx_framework_library. See _should_use_osx_framework_prefix()
    # docstring for details.
    if prefix is not None and scheme_name == "osx_framework_library":
        scheme_name = "posix_prefix"

    # Override the scheme's base directories with the custom home/prefix.
    if home is not None:
        variables = {k: home for k in _HOME_KEYS}
    elif prefix is not None:
        variables = {k: prefix for k in _HOME_KEYS}
    else:
        variables = {}

    paths = sysconfig.get_paths(scheme=scheme_name, vars=variables)

    # Logic here is very arbitrary, we're doing it for compatibility, don't ask.
    # 1. Pip historically uses a special header path in virtual environments.
    # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We
    #    only do the same when not running in a virtual environment because
    #    pip's historical header path logic (see point 1) did not do this.
    if running_under_virtualenv():
        if user:
            base = variables.get("userbase", sys.prefix)
        else:
            base = variables.get("base", sys.prefix)
        python_xy = f"python{get_major_minor_version()}"
        paths["include"] = os.path.join(base, "include", "site", python_xy)
    elif not dist_name:
        dist_name = "UNKNOWN"

    scheme = Scheme(
        platlib=paths["platlib"],
        purelib=paths["purelib"],
        headers=os.path.join(paths["include"], dist_name),
        scripts=paths["scripts"],
        data=paths["data"],
    )
    if root is not None:
        # Re-base every scheme path under the requested root.
        converted_keys = {}
        for key in SCHEME_KEYS:
            converted_keys[key] = change_root(root, getattr(scheme, key))
        scheme = Scheme(**converted_keys)
    return scheme
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
def get_bin_prefix() -> str:
    """Return the directory console scripts are installed into."""
    # Forcing to use /usr/local/bin for standard macOS framework installs.
    is_system_framework = (
        sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/"
    )
    if is_system_framework:
        return "/usr/local/bin"
    return sysconfig.get_paths()["scripts"]
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
def get_purelib() -> str:
    """Return the default pure-Python library directory from sysconfig."""
    paths = sysconfig.get_paths()
    return paths["purelib"]
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
def get_platlib() -> str:
    """Return the default platform-specific library directory from sysconfig."""
    paths = sysconfig.get_paths()
    return paths["platlib"]
|
llava/lib/python3.10/site-packages/pip/_internal/locations/base.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import os
|
| 3 |
+
import site
|
| 4 |
+
import sys
|
| 5 |
+
import sysconfig
|
| 6 |
+
import typing
|
| 7 |
+
|
| 8 |
+
from pip._internal.exceptions import InstallationError
|
| 9 |
+
from pip._internal.utils import appdirs
|
| 10 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
| 11 |
+
|
| 12 |
+
# Application Directories
# Per-user cache directory pip uses for downloads/wheels, via appdirs.
USER_CACHE_DIR = appdirs.user_cache_dir("pip")

# FIXME doesn't account for venv linked to global site-packages
site_packages: str = sysconfig.get_path("purelib")
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def get_major_minor_version() -> str:
    """
    Return the major-minor version of the current Python as a string, e.g.
    "3.7" or "3.10".
    """
    major, minor = sys.version_info[:2]
    return f"{major}.{minor}"
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def change_root(new_root: str, pathname: str) -> str:
    """Return 'pathname' with 'new_root' prepended.

    If 'pathname' is relative, this is equivalent to os.path.join(new_root, pathname).
    Otherwise, it requires making 'pathname' relative and then joining the
    two, which is tricky on DOS/Windows and Mac OS.

    This is borrowed from Python's standard library's distutils module.

    :raises InstallationError: on platforms other than posix/nt.
    """
    if os.name == "posix":
        if not os.path.isabs(pathname):
            return os.path.join(new_root, pathname)
        else:
            # Drop the leading "/" so the path is re-based under new_root.
            return os.path.join(new_root, pathname[1:])

    elif os.name == "nt":
        (drive, path) = os.path.splitdrive(pathname)
        # startswith() instead of path[0] avoids an IndexError when the
        # drive-stripped path is empty (e.g. pathname == "C:" or "").
        if path.startswith("\\"):
            path = path[1:]
        return os.path.join(new_root, path)

    else:
        raise InstallationError(
            f"Unknown platform: {os.name}\n"
            "Can not change root path prefix on unknown platform."
        )
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def get_src_prefix() -> str:
    """Return the directory where editable-install checkouts are placed."""
    if running_under_virtualenv():
        src_prefix = os.path.join(sys.prefix, "src")
    else:
        # FIXME: keep src in cwd for now (it is not a temporary folder)
        try:
            src_prefix = os.path.join(os.getcwd(), "src")
        except OSError:
            # In case the current working directory has been renamed or deleted
            sys.exit("The folder you are executing pip from can no longer be found.")

    # under macOS + virtualenv sys.prefix is not properly resolved
    # it is something like /path/to/python/bin/..
    return os.path.abspath(src_prefix)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
try:
    # Use getusersitepackages if this is present, as it ensures that the
    # value is initialised properly.
    user_site: typing.Optional[str] = site.getusersitepackages()
except AttributeError:
    # Older/limited site modules lack the function; fall back to the constant.
    user_site = site.USER_SITE
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
@functools.lru_cache(maxsize=None)
def is_osx_framework() -> bool:
    """Return True when the running Python is a macOS framework build."""
    framework_name = sysconfig.get_config_var("PYTHONFRAMEWORK")
    return bool(framework_name)
|
llava/lib/python3.10/site-packages/pip/_internal/models/__init__.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A package that contains models that represent entities.
|
| 2 |
+
"""
|
llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (239 Bytes). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc
ADDED
|
Binary file (1.21 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/index.cpython-310.pyc
ADDED
|
Binary file (1.21 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-310.pyc
ADDED
|
Binary file (1.73 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc
ADDED
|
Binary file (5.07 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/models/candidate.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from dataclasses import dataclass
|
| 2 |
+
|
| 3 |
+
from pip._vendor.packaging.version import Version
|
| 4 |
+
from pip._vendor.packaging.version import parse as parse_version
|
| 5 |
+
|
| 6 |
+
from pip._internal.models.link import Link
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@dataclass(frozen=True)
class InstallationCandidate:
    """Represents a potential "candidate" for installation."""

    # __slots__ has no annotation, so the dataclass machinery ignores it.
    __slots__ = ["name", "version", "link"]

    name: str
    version: Version
    link: Link

    def __init__(self, name: str, version: str, link: Link) -> None:
        # The dataclass is frozen, so plain attribute assignment would raise
        # FrozenInstanceError; object.__setattr__ bypasses the freeze.
        object.__setattr__(self, "name", name)
        # Parse eagerly so the stored version supports Version ordering.
        object.__setattr__(self, "version", parse_version(version))
        object.__setattr__(self, "link", link)

    def __str__(self) -> str:
        return f"{self.name!r} candidate (version {self.version} at {self.link})"
|
llava/lib/python3.10/site-packages/pip/_internal/models/direct_url.py
ADDED
|
@@ -0,0 +1,224 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
""" PEP 610 """
|
| 2 |
+
|
| 3 |
+
import json
|
| 4 |
+
import re
|
| 5 |
+
import urllib.parse
|
| 6 |
+
from dataclasses import dataclass
|
| 7 |
+
from typing import Any, ClassVar, Dict, Iterable, Optional, Type, TypeVar, Union
|
| 8 |
+
|
| 9 |
+
__all__ = [
    "DirectUrl",
    "DirectUrlValidationError",
    "DirInfo",
    "ArchiveInfo",
    "VcsInfo",
]

T = TypeVar("T")

# Filename under which PEP 610 data is stored in *.dist-info directories.
DIRECT_URL_METADATA_NAME = "direct_url.json"
# Matches credentials written as environment-variable placeholders,
# e.g. "${USER}" or "${USER}:${PASSWORD}", which may remain in URLs.
ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class DirectUrlValidationError(Exception):
    """Raised when a direct_url.json document fails validation."""
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def _get(
    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
) -> Optional[T]:
    """Get value from dictionary and verify expected type."""
    # EAFP: a single lookup instead of "key in d" followed by d[key].
    try:
        value = d[key]
    except KeyError:
        return default
    if not isinstance(value, expected_type):
        raise DirectUrlValidationError(
            f"{value!r} has unexpected type for {key} (expected {expected_type})"
        )
    return value
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def _get_required(
    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
) -> T:
    """Like _get, but require that the resolved value is not None."""
    value = _get(d, expected_type, key, default)
    if value is not None:
        return value
    raise DirectUrlValidationError(f"{key} must have a value")
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":
|
| 51 |
+
infos = [info for info in infos if info is not None]
|
| 52 |
+
if not infos:
|
| 53 |
+
raise DirectUrlValidationError(
|
| 54 |
+
"missing one of archive_info, dir_info, vcs_info"
|
| 55 |
+
)
|
| 56 |
+
if len(infos) > 1:
|
| 57 |
+
raise DirectUrlValidationError(
|
| 58 |
+
"more than one of archive_info, dir_info, vcs_info"
|
| 59 |
+
)
|
| 60 |
+
assert infos[0] is not None
|
| 61 |
+
return infos[0]
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def _filter_none(**kwargs: Any) -> Dict[str, Any]:
|
| 65 |
+
"""Make dict excluding None values."""
|
| 66 |
+
return {k: v for k, v in kwargs.items() if v is not None}
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
@dataclass
class VcsInfo:
    """VCS source information for a PEP 610 direct URL (``vcs_info`` key)."""

    # JSON key this payload is serialized under in direct_url.json.
    name: ClassVar = "vcs_info"

    vcs: str
    commit_id: str
    requested_revision: Optional[str] = None

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
        # None means the "vcs_info" key was absent from the document.
        if d is None:
            return None
        return cls(
            vcs=_get_required(d, str, "vcs"),
            commit_id=_get_required(d, str, "commit_id"),
            requested_revision=_get(d, str, "requested_revision"),
        )

    def _to_dict(self) -> Dict[str, Any]:
        return _filter_none(
            vcs=self.vcs,
            requested_revision=self.requested_revision,
            commit_id=self.commit_id,
        )
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class ArchiveInfo:
    """Archive information for a PEP 610 direct URL (``archive_info`` key)."""

    # JSON key this payload is serialized under in direct_url.json.
    name = "archive_info"

    def __init__(
        self,
        hash: Optional[str] = None,
        hashes: Optional[Dict[str, str]] = None,
    ) -> None:
        # set hashes before hash, since the hash setter will further populate hashes
        self.hashes = hashes
        self.hash = hash

    @property
    def hash(self) -> Optional[str]:
        # Legacy single "name=value" hash string, if any.
        return self._hash

    @hash.setter
    def hash(self, value: Optional[str]) -> None:
        if value is not None:
            # Auto-populate the hashes key to upgrade to the new format automatically.
            # We don't back-populate the legacy hash key from hashes.
            try:
                hash_name, hash_value = value.split("=", 1)
            except ValueError:
                raise DirectUrlValidationError(
                    f"invalid archive_info.hash format: {value!r}"
                )
            if self.hashes is None:
                self.hashes = {hash_name: hash_value}
            elif hash_name not in self.hashes:
                # Copy before mutating so a caller-owned dict is never altered.
                self.hashes = self.hashes.copy()
                self.hashes[hash_name] = hash_value
        self._hash = value

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
        # None means the "archive_info" key was absent from the document.
        if d is None:
            return None
        return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes"))

    def _to_dict(self) -> Dict[str, Any]:
        return _filter_none(hash=self.hash, hashes=self.hashes)
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
@dataclass
class DirInfo:
    """Local-directory information for a PEP 610 direct URL (``dir_info`` key)."""

    # JSON key this payload is serialized under in direct_url.json.
    name: ClassVar = "dir_info"

    editable: bool = False

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
        # None means the "dir_info" key was absent from the document.
        if d is None:
            return None
        return cls(editable=_get_required(d, bool, "editable", default=False))

    def _to_dict(self) -> Dict[str, Any]:
        # "editable or None" makes _filter_none drop the key when False,
        # so only editable installs serialize the flag.
        return _filter_none(editable=self.editable or None)
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
# Union of the three PEP 610 payload types a DirectUrl can carry.
InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
@dataclass
class DirectUrl:
    """In-memory representation of a PEP 610 ``direct_url.json`` document."""

    url: str
    info: InfoType
    subdirectory: Optional[str] = None

    def _remove_auth_from_netloc(self, netloc: str) -> str:
        # Strip "user:password@" credentials from the netloc, except for the
        # conventional "git" user of git URLs and ${ENV_VAR} placeholders.
        if "@" not in netloc:
            return netloc
        user_pass, netloc_no_user_pass = netloc.split("@", 1)
        if (
            isinstance(self.info, VcsInfo)
            and self.info.vcs == "git"
            and user_pass == "git"
        ):
            return netloc
        if ENV_VAR_RE.match(user_pass):
            return netloc
        return netloc_no_user_pass

    @property
    def redacted_url(self) -> str:
        """url with user:password part removed unless it is formed with
        environment variables as specified in PEP 610, or it is ``git``
        in the case of a git URL.
        """
        purl = urllib.parse.urlsplit(self.url)
        netloc = self._remove_auth_from_netloc(purl.netloc)
        surl = urllib.parse.urlunsplit(
            (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
        )
        return surl

    def validate(self) -> None:
        # Round-tripping raises DirectUrlValidationError for malformed fields.
        self.from_dict(self.to_dict())

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
        """Build a DirectUrl from a parsed direct_url.json dictionary."""
        return DirectUrl(
            url=_get_required(d, str, "url"),
            subdirectory=_get(d, str, "subdirectory"),
            # Exactly one of the three info payloads must be present.
            info=_exactly_one_of(
                [
                    ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
                    DirInfo._from_dict(_get(d, dict, "dir_info")),
                    VcsInfo._from_dict(_get(d, dict, "vcs_info")),
                ]
            ),
        )

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a direct_url.json-shaped dict, with credentials redacted."""
        res = _filter_none(
            url=self.redacted_url,
            subdirectory=self.subdirectory,
        )
        res[self.info.name] = self.info._to_dict()
        return res

    @classmethod
    def from_json(cls, s: str) -> "DirectUrl":
        return cls.from_dict(json.loads(s))

    def to_json(self) -> str:
        return json.dumps(self.to_dict(), sort_keys=True)

    def is_local_editable(self) -> bool:
        # True only for local directory installs made with "pip install -e".
        return isinstance(self.info, DirInfo) and self.info.editable
|
llava/lib/python3.10/site-packages/pip/_internal/models/index.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import urllib.parse
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
class PackageIndex:
    """Represents a Package Index and provides easier access to endpoints"""

    __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]

    def __init__(self, url: str, file_storage_domain: str) -> None:
        super().__init__()
        self.url = url
        split_result = urllib.parse.urlsplit(url)
        self.netloc = split_result.netloc
        self.simple_url = self._url_for_path("simple")
        self.pypi_url = self._url_for_path("pypi")

        # This is part of a temporary hack used to block installs of PyPI
        # packages which depend on external urls only necessary until PyPI can
        # block such packages themselves
        self.file_storage_domain = file_storage_domain

    def _url_for_path(self, path: str) -> str:
        joined = urllib.parse.urljoin(self.url, path)
        return joined
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
# Module-level singletons for the production and test instances of the
# Python Package Index.
PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
TestPyPI = PackageIndex(
    "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
)
|
llava/lib/python3.10/site-packages/pip/_internal/models/installation_report.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any, Dict, Sequence
|
| 2 |
+
|
| 3 |
+
from pip._vendor.packaging.markers import default_environment
|
| 4 |
+
|
| 5 |
+
from pip import __version__
|
| 6 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class InstallationReport:
    """Aggregates per-requirement installation details into a JSON-compatible dict."""

    def __init__(self, install_requirements: Sequence[InstallRequirement]):
        # Requirements to include in the report, one entry each in "install".
        self._install_requirements = install_requirements

    @classmethod
    def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]:
        """Serialize a single InstallRequirement into its report entry."""
        assert ireq.download_info, f"No download_info for {ireq}"
        res = {
            # PEP 610 json for the download URL. download_info.archive_info.hashes may
            # be absent when the requirement was installed from the wheel cache
            # and the cache entry was populated by an older pip version that did not
            # record origin.json.
            "download_info": ireq.download_info.to_dict(),
            # is_direct is true if the requirement was a direct URL reference (which
            # includes editable requirements), and false if the requirement was
            # downloaded from a PEP 503 index or --find-links.
            "is_direct": ireq.is_direct,
            # is_yanked is true if the requirement was yanked from the index, but
            # was still selected by pip to conform to PEP 592.
            "is_yanked": ireq.link.is_yanked if ireq.link else False,
            # requested is true if the requirement was specified by the user (aka
            # top level requirement), and false if it was installed as a dependency of a
            # requirement. https://peps.python.org/pep-0376/#requested
            "requested": ireq.user_supplied,
            # PEP 566 json encoding for metadata
            # https://www.python.org/dev/peps/pep-0566/#json-compatible-metadata
            "metadata": ireq.get_dist().metadata_dict,
        }
        if ireq.user_supplied and ireq.extras:
            # For top level requirements, the list of requested extras, if any.
            res["requested_extras"] = sorted(ireq.extras)
        return res

    def to_dict(self) -> Dict[str, Any]:
        """Assemble the full report: format version, pip version, entries, environment."""
        return {
            "version": "1",
            "pip_version": __version__,
            "install": [
                self._install_req_to_dict(ireq) for ireq in self._install_requirements
            ],
            # https://peps.python.org/pep-0508/#environment-markers
            # TODO: currently, the resolver uses the default environment to evaluate
            # environment markers, so that is what we report here. In the future, it
            # should also take into account options such as --python-version or
            # --platform, perhaps under the form of an environment_override field?
            # https://github.com/pypa/pip/issues/11198
            "environment": default_environment(),
        }
|
llava/lib/python3.10/site-packages/pip/_internal/models/link.py
ADDED
|
@@ -0,0 +1,604 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
import itertools
import logging
import os
import posixpath
import re
import urllib.parse
import urllib.request
from dataclasses import dataclass
from typing import (
    TYPE_CHECKING,
    Any,
    Dict,
    List,
    Mapping,
    NamedTuple,
    Optional,
    Tuple,
    Union,
)

from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filetypes import WHEEL_EXTENSION
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.misc import (
    pairwise,
    redact_auth_from_url,
    split_auth_from_netloc,
    splitext,
)
from pip._internal.utils.urls import path_to_url, url_to_path
|
| 31 |
+
|
| 32 |
+
if TYPE_CHECKING:
|
| 33 |
+
from pip._internal.index.collector import IndexContent
|
| 34 |
+
|
| 35 |
+
# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)


# Order matters, earlier hashes have a precedence over later hashes for what
# we will pick to use.
_SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
@dataclass(frozen=True)
class LinkHash:
    """Links to content may have embedded hash values. This class parses those.

    `name` must be any member of `_SUPPORTED_HASHES`.

    This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to
    be JSON-serializable to conform to PEP 610, this class contains the logic for
    parsing a hash name and value for correctness, and then checking whether that hash
    conforms to a schema with `.is_hash_allowed()`."""

    # Hash algorithm name (one of _SUPPORTED_HASHES) and its digest string.
    name: str
    value: str

    _hash_url_fragment_re = re.compile(
        # NB: we do not validate that the second group (.*) is a valid hex
        # digest. Instead, we simply keep that string in this class, and then check it
        # against Hashes when hash-checking is needed. This is easier to debug than
        # proactively discarding an invalid hex digest, as we handle incorrect hashes
        # and malformed hashes in the same place.
        r"[#&]({choices})=([^&]*)".format(
            choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
        ),
    )

    def __post_init__(self) -> None:
        assert self.name in _SUPPORTED_HASHES

    @classmethod
    @functools.lru_cache(maxsize=None)
    def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
        """Search a string for a checksum algorithm name and encoded output value."""
        # NOTE: the unbounded cache is keyed on (cls, url) and lives for the
        # process lifetime.
        match = cls._hash_url_fragment_re.search(url)
        if match is None:
            return None
        name, value = match.groups()
        return cls(name=name, value=value)

    def as_dict(self) -> Dict[str, str]:
        # Single-entry mapping {algorithm: digest}.
        return {self.name: self.value}

    def as_hashes(self) -> Hashes:
        """Return a Hashes instance which checks only for the current hash."""
        return Hashes({self.name: [self.value]})

    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
        """
        Return True if the current hash is allowed by `hashes`.
        """
        if hashes is None:
            return False
        return hashes.is_hash_allowed(self.name, hex_digest=self.value)
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
@dataclass(frozen=True)
class MetadataFile:
    """Describes the core metadata file published alongside a distribution.

    ``hashes`` maps hash algorithm names to digest strings, or is None when
    the index advertised the file without supplying any hashes.
    """

    hashes: Optional[Dict[str, str]]

    def __post_init__(self) -> None:
        # Callers are expected to have filtered via supported_hashes() first.
        if self.hashes is None:
            return
        for algorithm in self.hashes:
            assert algorithm in _SUPPORTED_HASHES
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
    """Drop unsupported hash algorithms from *hashes*.

    Returns None when the input is None or when filtering leaves nothing.
    """
    if hashes is None:
        return None
    filtered = {
        name: digest for name, digest in hashes.items() if name in _SUPPORTED_HASHES
    }
    return filtered or None
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def _clean_url_path_part(part: str) -> str:
|
| 120 |
+
"""
|
| 121 |
+
Clean a "part" of a URL path (i.e. after splitting on "@" characters).
|
| 122 |
+
"""
|
| 123 |
+
# We unquote prior to quoting to make sure nothing is double quoted.
|
| 124 |
+
return urllib.parse.quote(urllib.parse.unquote(part))
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
def _clean_file_url_path(part: str) -> str:
|
| 128 |
+
"""
|
| 129 |
+
Clean the first part of a URL path that corresponds to a local
|
| 130 |
+
filesystem path (i.e. the first part after splitting on "@" characters).
|
| 131 |
+
"""
|
| 132 |
+
# We unquote prior to quoting to make sure nothing is double quoted.
|
| 133 |
+
# Also, on Windows the path part might contain a drive letter which
|
| 134 |
+
# should not be quoted. On Linux where drive letters do not
|
| 135 |
+
# exist, the colon should be quoted. We rely on urllib.request
|
| 136 |
+
# to do the right thing here.
|
| 137 |
+
return urllib.request.pathname2url(urllib.request.url2pathname(part))
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
# Reserved markers preserved while cleaning URL paths: a literal "@" and a
# percent-encoded "/" ("%2F"), matched case-insensitively. The capturing
# group keeps the separators in re.split()'s output.
_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def _clean_url_path(path: str, is_local_path: bool) -> str:
    """
    Clean the path portion of a URL.
    """
    if is_local_path:
        clean_func = _clean_file_url_path
    else:
        clean_func = _clean_url_path_part

    # Split on the reserved characters prior to cleaning so that
    # revision strings in VCS URLs are properly preserved.
    parts = _reserved_chars_re.split(path)

    cleaned_parts = []
    # NOTE(review): `pairwise` here comes from pip._internal.utils.misc and
    # appears to yield non-overlapping pairs (s0, s1), (s2, s3), ... — unlike
    # itertools.pairwise; the "" sentinel gives the final text chunk a
    # partner. Confirm against the misc module before relying on this.
    for to_clean, reserved in pairwise(itertools.chain(parts, [""])):
        cleaned_parts.append(clean_func(to_clean))
        # Normalize %xx escapes (e.g. %2f -> %2F)
        cleaned_parts.append(reserved.upper())

    return "".join(cleaned_parts)
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
def _ensure_quoted_url(url: str) -> str:
    """
    Make sure a link is fully quoted.
    For example, if ' ' occurs in the URL, it will be replaced with "%20",
    and without double-quoting other characters.
    """
    # Decompose into `scheme://netloc/path?query#fragment` parts.
    parts = urllib.parse.urlsplit(url)
    # An empty netloc means the URL names a local filesystem path.
    cleaned_path = _clean_url_path(parts.path, is_local_path=not parts.netloc)
    return urllib.parse.urlunsplit(parts._replace(path=cleaned_path))
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
def _absolute_link_url(base_url: str, url: str) -> str:
|
| 182 |
+
"""
|
| 183 |
+
A faster implementation of urllib.parse.urljoin with a shortcut
|
| 184 |
+
for absolute http/https URLs.
|
| 185 |
+
"""
|
| 186 |
+
if url.startswith(("https://", "http://")):
|
| 187 |
+
return url
|
| 188 |
+
else:
|
| 189 |
+
return urllib.parse.urljoin(base_url, url)
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
@functools.total_ordering
class Link:
    """Represents a parsed link from a Package Index's simple URL"""

    # __slots__ avoids a per-instance __dict__; many Link objects are created
    # while parsing index pages.
    __slots__ = [
        "_parsed_url",
        "_url",
        "_path",
        "_hashes",
        "comes_from",
        "requires_python",
        "yanked_reason",
        "metadata_file_data",
        "cache_link_parsing",
        "egg_fragment",
    ]

    def __init__(
        self,
        url: str,
        comes_from: Optional[Union[str, "IndexContent"]] = None,
        requires_python: Optional[str] = None,
        yanked_reason: Optional[str] = None,
        metadata_file_data: Optional[MetadataFile] = None,
        cache_link_parsing: bool = True,
        hashes: Optional[Mapping[str, str]] = None,
    ) -> None:
        """
        :param url: url of the resource pointed to (href of the link)
        :param comes_from: instance of IndexContent where the link was found,
            or string.
        :param requires_python: String containing the `Requires-Python`
            metadata field, specified in PEP 345. This may be specified by
            a data-requires-python attribute in the HTML link tag, as
            described in PEP 503.
        :param yanked_reason: the reason the file has been yanked, if the
            file has been yanked, or None if the file hasn't been yanked.
            This is the value of the "data-yanked" attribute, if present, in
            a simple repository HTML link. If the file has been yanked but
            no reason was provided, this should be the empty string. See
            PEP 592 for more information and the specification.
        :param metadata_file_data: the metadata attached to the file, or None if
            no such metadata is provided. This argument, if not None, indicates
            that a separate metadata file exists, and also optionally supplies
            hashes for that file.
        :param cache_link_parsing: A flag that is used elsewhere to determine
            whether resources retrieved from this link should be cached. PyPI
            URLs should generally have this set to False, for example.
        :param hashes: A mapping of hash names to digests to allow us to
            determine the validity of a download.
        """

        # The comes_from, requires_python, and metadata_file_data arguments are
        # only used by classmethods of this class, and are not used in client
        # code directly.

        # url can be a UNC windows share
        if url.startswith("\\\\"):
            url = path_to_url(url)

        self._parsed_url = urllib.parse.urlsplit(url)
        # Store the url as a private attribute to prevent accidentally
        # trying to set a new value.
        self._url = url
        # The .path property is hot, so calculate its value ahead of time.
        self._path = urllib.parse.unquote(self._parsed_url.path)

        # A hash embedded in the URL fragment takes precedence over the same
        # algorithm in the `hashes` argument (dict-merge order below).
        link_hash = LinkHash.find_hash_url_fragment(url)
        hashes_from_link = {} if link_hash is None else link_hash.as_dict()
        if hashes is None:
            self._hashes = hashes_from_link
        else:
            self._hashes = {**hashes, **hashes_from_link}

        self.comes_from = comes_from
        # Normalize empty string to None.
        self.requires_python = requires_python if requires_python else None
        self.yanked_reason = yanked_reason
        self.metadata_file_data = metadata_file_data

        self.cache_link_parsing = cache_link_parsing
        # Computed eagerly; _egg_fragment() may emit a deprecation warning.
        self.egg_fragment = self._egg_fragment()

    @classmethod
    def from_json(
        cls,
        file_data: Dict[str, Any],
        page_url: str,
    ) -> Optional["Link"]:
        """
        Convert an pypi json document from a simple repository page into a Link.

        Returns None when the entry has no "url" key.
        """
        file_url = file_data.get("url")
        if file_url is None:
            return None

        url = _ensure_quoted_url(_absolute_link_url(page_url, file_url))
        pyrequire = file_data.get("requires-python")
        yanked_reason = file_data.get("yanked")
        hashes = file_data.get("hashes", {})

        # PEP 714: Indexes must use the name core-metadata, but
        # clients should support the old name as a fallback for compatibility.
        metadata_info = file_data.get("core-metadata")
        if metadata_info is None:
            metadata_info = file_data.get("dist-info-metadata")

        # The metadata info value may be a boolean, or a dict of hashes.
        if isinstance(metadata_info, dict):
            # The file exists, and hashes have been supplied
            metadata_file_data = MetadataFile(supported_hashes(metadata_info))
        elif metadata_info:
            # The file exists, but there are no hashes
            metadata_file_data = MetadataFile(None)
        else:
            # False or not present: the file does not exist
            metadata_file_data = None

        # The Link.yanked_reason expects an empty string instead of a boolean.
        if yanked_reason and not isinstance(yanked_reason, str):
            yanked_reason = ""
        # The Link.yanked_reason expects None instead of False.
        elif not yanked_reason:
            yanked_reason = None

        return cls(
            url,
            comes_from=page_url,
            requires_python=pyrequire,
            yanked_reason=yanked_reason,
            hashes=hashes,
            metadata_file_data=metadata_file_data,
        )

    @classmethod
    def from_element(
        cls,
        anchor_attribs: Dict[str, Optional[str]],
        page_url: str,
        base_url: str,
    ) -> Optional["Link"]:
        """
        Convert an anchor element's attributes in a simple repository page to a Link.

        Returns None when the anchor has no (or an empty) href.
        """
        href = anchor_attribs.get("href")
        if not href:
            return None

        url = _ensure_quoted_url(_absolute_link_url(base_url, href))
        pyrequire = anchor_attribs.get("data-requires-python")
        yanked_reason = anchor_attribs.get("data-yanked")

        # PEP 714: Indexes must use the name data-core-metadata, but
        # clients should support the old name as a fallback for compatibility.
        metadata_info = anchor_attribs.get("data-core-metadata")
        if metadata_info is None:
            metadata_info = anchor_attribs.get("data-dist-info-metadata")
        # The metadata info value may be the string "true", or a string of
        # the form "hashname=hashval"
        if metadata_info == "true":
            # The file exists, but there are no hashes
            metadata_file_data = MetadataFile(None)
        elif metadata_info is None:
            # The file does not exist
            metadata_file_data = None
        else:
            # The file exists, and hashes have been supplied
            hashname, sep, hashval = metadata_info.partition("=")
            if sep == "=":
                metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
            else:
                # Error - data is wrong. Treat as no hashes supplied.
                logger.debug(
                    "Index returned invalid data-dist-info-metadata value: %s",
                    metadata_info,
                )
                metadata_file_data = MetadataFile(None)

        return cls(
            url,
            comes_from=page_url,
            requires_python=pyrequire,
            yanked_reason=yanked_reason,
            metadata_file_data=metadata_file_data,
        )

    def __str__(self) -> str:
        if self.requires_python:
            rp = f" (requires-python:{self.requires_python})"
        else:
            rp = ""
        if self.comes_from:
            return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
        else:
            return redact_auth_from_url(str(self._url))

    def __repr__(self) -> str:
        return f"<Link {self}>"

    # Equality, hashing, and ordering are all based solely on the url string;
    # total_ordering derives the remaining comparisons from __eq__/__lt__.
    def __hash__(self) -> int:
        return hash(self.url)

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Link):
            return NotImplemented
        return self.url == other.url

    def __lt__(self, other: Any) -> bool:
        if not isinstance(other, Link):
            return NotImplemented
        return self.url < other.url

    @property
    def url(self) -> str:
        return self._url

    @property
    def filename(self) -> str:
        path = self.path.rstrip("/")
        name = posixpath.basename(path)
        if not name:
            # Make sure we don't leak auth information if the netloc
            # includes a username and password.
            netloc, user_pass = split_auth_from_netloc(self.netloc)
            return netloc

        name = urllib.parse.unquote(name)
        assert name, f"URL {self._url!r} produced no filename"
        return name

    @property
    def file_path(self) -> str:
        return url_to_path(self.url)

    @property
    def scheme(self) -> str:
        return self._parsed_url.scheme

    @property
    def netloc(self) -> str:
        """
        This can contain auth information.
        """
        return self._parsed_url.netloc

    @property
    def path(self) -> str:
        # Pre-computed (unquoted) in __init__ because this property is hot.
        return self._path

    def splitext(self) -> Tuple[str, str]:
        return splitext(posixpath.basename(self.path.rstrip("/")))

    @property
    def ext(self) -> str:
        return self.splitext()[1]

    @property
    def url_without_fragment(self) -> str:
        scheme, netloc, path, query, fragment = self._parsed_url
        return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))

    _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")

    # Per PEP 508.
    _project_name_re = re.compile(
        r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
    )

    def _egg_fragment(self) -> Optional[str]:
        match = self._egg_fragment_re.search(self._url)
        if not match:
            return None

        # An egg fragment looks like a PEP 508 project name, along with
        # an optional extras specifier. Anything else is invalid.
        project_name = match.group(1)
        if not self._project_name_re.match(project_name):
            deprecated(
                reason=f"{self} contains an egg fragment with a non-PEP 508 name.",
                replacement="to use the req @ url syntax, and remove the egg fragment",
                gone_in="25.1",
                issue=13157,
            )

        return project_name

    _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")

    @property
    def subdirectory_fragment(self) -> Optional[str]:
        match = self._subdirectory_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    def metadata_link(self) -> Optional["Link"]:
        """Return a link to the associated core metadata file (if any)."""
        if self.metadata_file_data is None:
            return None
        metadata_url = f"{self.url_without_fragment}.metadata"
        if self.metadata_file_data.hashes is None:
            return Link(metadata_url)
        return Link(metadata_url, hashes=self.metadata_file_data.hashes)

    def as_hashes(self) -> Hashes:
        return Hashes({k: [v] for k, v in self._hashes.items()})

    @property
    def hash(self) -> Optional[str]:
        # First digest value in insertion order, or None when no hashes.
        return next(iter(self._hashes.values()), None)

    @property
    def hash_name(self) -> Optional[str]:
        # First hash algorithm name in insertion order, or None.
        return next(iter(self._hashes), None)

    @property
    def show_url(self) -> str:
        # Basename of the URL with query string and fragment stripped.
        return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])

    @property
    def is_file(self) -> bool:
        return self.scheme == "file"

    def is_existing_dir(self) -> bool:
        return self.is_file and os.path.isdir(self.file_path)

    @property
    def is_wheel(self) -> bool:
        return self.ext == WHEEL_EXTENSION

    @property
    def is_vcs(self) -> bool:
        # Imported lazily; presumably avoids an import cycle — confirm.
        from pip._internal.vcs import vcs

        return self.scheme in vcs.all_schemes

    @property
    def is_yanked(self) -> bool:
        return self.yanked_reason is not None

    @property
    def has_hash(self) -> bool:
        return bool(self._hashes)

    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
        """
        Return True if the link has a hash and it is allowed by `hashes`.
        """
        if hashes is None:
            return False
        return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
class _CleanResult(NamedTuple):
    """Convert link for equivalency check.

    This is used in the resolver to check whether two URL-specified requirements
    likely point to the same distribution and can be considered equivalent. This
    equivalency logic avoids comparing URLs literally, which can be too strict
    (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users.

    Currently this does three things:

    1. Drop the basic auth part. This is technically wrong since a server can
       serve different content based on auth, but if it does that, it is even
       impossible to guarantee two URLs without auth are equivalent, since
       the user can input different auth information when prompted. So the
       practical solution is to assume the auth doesn't affect the response.
    2. Parse the query to avoid the ordering issue. Note that ordering under the
       same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are
       still considered different.
    3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
       hash values, since it should have no impact the downloaded content. Note
       that this drops the "egg=" part historically used to denote the requested
       project (and extras), which is wrong in the strictest sense, but too many
       people are supplying it inconsistently to cause superfluous resolution
       conflicts, so we choose to also ignore them.
    """

    # Split URL with auth stripped from netloc and query/fragment emptied.
    parsed: urllib.parse.SplitResult
    # Parsed query string; comparison is per-key, order-insensitive.
    query: Dict[str, List[str]]
    # First "subdirectory=" value from the fragment, or "" if absent.
    subdirectory: str
    # Hash algorithm name -> first digest value found in the fragment.
    hashes: Dict[str, str]
|
| 574 |
+
|
| 575 |
+
|
| 576 |
+
def _clean_link(link: Link) -> _CleanResult:
    """Normalize *link* into a _CleanResult for equivalency comparison."""
    split = link._parsed_url
    # Strip any "user:password@" prefix from the host part.
    host = split.netloc.rsplit("@", 1)[-1]
    # According to RFC 8089, an empty host in file: means localhost.
    if not host and split.scheme == "file":
        host = "localhost"
    fragment_params = urllib.parse.parse_qs(split.fragment)
    if "egg" in fragment_params:
        logger.debug("Ignoring egg= fragment in %s", link)
    # When multiple subdirectory values exist, keep only the first one.
    # This matches the behavior of Link.subdirectory_fragment.
    try:
        subdirectory = fragment_params["subdirectory"][0]
    except (IndexError, KeyError):
        subdirectory = ""
    # Likewise keep only the first value under each hash algorithm,
    # matching the behavior of Link.hash_value.
    hashes = {
        name: fragment_params[name][0]
        for name in _SUPPORTED_HASHES
        if name in fragment_params
    }
    return _CleanResult(
        parsed=split._replace(netloc=host, query="", fragment=""),
        query=urllib.parse.parse_qs(split.query),
        subdirectory=subdirectory,
        hashes=hashes,
    )
|
| 600 |
+
|
| 601 |
+
|
| 602 |
+
@functools.lru_cache(maxsize=None)
def links_equivalent(link1: Link, link2: Link) -> bool:
    """Return whether two links likely point at the same distribution.

    Results are memoized since the resolver may compare the same pair of
    links repeatedly.
    """
    cleaned_one = _clean_link(link1)
    cleaned_two = _clean_link(link2)
    return cleaned_one == cleaned_two
|
llava/lib/python3.10/site-packages/pip/_internal/models/scheme.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
For types associated with installation schemes.
|
| 3 |
+
|
| 4 |
+
For a general overview of available schemes and their context, see
|
| 5 |
+
https://docs.python.org/3/install/index.html#alternate-installation.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
from dataclasses import dataclass
|
| 9 |
+
|
| 10 |
+
# Keys identifying each path in a Scheme, in declaration order.
SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]


@dataclass(frozen=True)
class Scheme:
    """A Scheme holds paths which are used as the base directories for
    artifacts associated with a Python package.
    """

    # Restrict instances to exactly the scheme keys (no per-instance
    # __dict__) for these frequently-created objects.
    __slots__ = SCHEME_KEYS

    platlib: str  # base dir for platform-specific library files
    purelib: str  # base dir for pure-Python library files
    headers: str  # base dir for header files
    scripts: str  # base dir for executable scripts
    data: str  # base dir for data files
|
llava/lib/python3.10/site-packages/pip/_internal/models/selection_prefs.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Optional
|
| 2 |
+
|
| 3 |
+
from pip._internal.models.format_control import FormatControl
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
# TODO: This needs Python 3.10's improved slots support for dataclasses
|
| 7 |
+
# to be converted into a dataclass.
|
| 8 |
+
class SelectionPreferences:
    """
    Encapsulates the candidate selection preferences for downloading
    and installing files.
    """

    __slots__ = [
        "allow_yanked",
        "allow_all_prereleases",
        "format_control",
        "prefer_binary",
        "ignore_requires_python",
    ]

    # `allow_yanked` deliberately has no default value: every call site
    # must make an explicit decision about yanked releases, keeping that
    # choice visible to readers of the calling code.
    def __init__(
        self,
        allow_yanked: bool,
        allow_all_prereleases: bool = False,
        format_control: Optional[FormatControl] = None,
        prefer_binary: bool = False,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """Create a SelectionPreferences object.

        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param prefer_binary: Whether to prefer an old, but valid, binary
            dist over a new source dist.
        :param ignore_requires_python: Whether to ignore incompatible
            "Requires-Python" values in links. Defaults to False.
        """
        self.allow_yanked = allow_yanked
        self.allow_all_prereleases = allow_all_prereleases
        self.format_control = format_control
        self.prefer_binary = prefer_binary
        # Treat an unspecified value as "do not ignore".
        self.ignore_requires_python = (
            False if ignore_requires_python is None else ignore_requires_python
        )
|
llava/lib/python3.10/site-packages/pip/_internal/models/target_python.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from typing import List, Optional, Set, Tuple
|
| 3 |
+
|
| 4 |
+
from pip._vendor.packaging.tags import Tag
|
| 5 |
+
|
| 6 |
+
from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot
|
| 7 |
+
from pip._internal.utils.misc import normalize_version_info
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TargetPython:
    """
    Encapsulates the properties of a Python interpreter one is targeting
    for a package install, download, etc.
    """

    __slots__ = [
        "_given_py_version_info",
        "abis",
        "implementation",
        "platforms",
        "py_version",
        "py_version_info",
        "_valid_tags",
        "_valid_tags_set",
    ]

    def __init__(
        self,
        platforms: Optional[List[str]] = None,
        py_version_info: Optional[Tuple[int, ...]] = None,
        abis: Optional[List[str]] = None,
        implementation: Optional[str] = None,
    ) -> None:
        """
        :param platforms: A list of strings or None. If None, searches for
            packages that are supported by the current system. Otherwise, will
            find packages that can be built on the platforms passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param py_version_info: An optional tuple of ints representing the
            Python version information to use (e.g. `sys.version_info[:3]`).
            This can have length 1, 2, or 3 when provided.
        :param abis: A list of strings or None. This is passed to
            compatibility_tags.py's get_supported() function as is.
        :param implementation: A string or None. This is passed to
            compatibility_tags.py's get_supported() function as is.
        """
        # Remember exactly what the caller passed; get_sorted_tags() and
        # format_given() need to distinguish "not given" from an explicit
        # version.
        self._given_py_version_info = py_version_info

        if py_version_info is not None:
            py_version_info = normalize_version_info(py_version_info)
        else:
            py_version_info = sys.version_info[:3]

        self.abis = abis
        self.implementation = implementation
        self.platforms = platforms
        self.py_version = ".".join(map(str, py_version_info[:2]))
        self.py_version_info = py_version_info

        # Lazily-populated caches for get_sorted_tags / get_unsorted_tags.
        self._valid_tags: Optional[List[Tag]] = None
        self._valid_tags_set: Optional[Set[Tag]] = None

    def format_given(self) -> str:
        """
        Format the given, non-None attributes for display.
        """
        if self._given_py_version_info is None:
            display_version = None
        else:
            display_version = ".".join(
                str(part) for part in self._given_py_version_info
            )

        attrs = [
            ("platforms", self.platforms),
            ("version_info", display_version),
            ("abis", self.abis),
            ("implementation", self.implementation),
        ]
        return " ".join(
            f"{key}={value!r}" for key, value in attrs if value is not None
        )

    def get_sorted_tags(self) -> List[Tag]:
        """
        Return the supported PEP 425 tags to check wheel candidates against.

        The tags are returned in order of preference (most preferred first).
        """
        if self._valid_tags is None:
            # get_supported() applies special default logic when
            # versions=None, so only convert when a version was given.
            given = self._given_py_version_info
            version = None if given is None else version_info_to_nodot(given)

            self._valid_tags = get_supported(
                version=version,
                platforms=self.platforms,
                abis=self.abis,
                impl=self.implementation,
            )

        return self._valid_tags

    def get_unsorted_tags(self) -> Set[Tag]:
        """Exactly the same as get_sorted_tags, but returns a set.

        This is important for performance.
        """
        if self._valid_tags_set is None:
            self._valid_tags_set = set(self.get_sorted_tags())

        return self._valid_tags_set
|
llava/lib/python3.10/site-packages/pip/_internal/operations/__init__.py
ADDED
|
File without changes
|
llava/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (175 Bytes). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc
ADDED
|
Binary file (4.79 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-310.pyc
ADDED
|
Binary file (6.32 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc
ADDED
|
Binary file (15.7 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/__init__.py
ADDED
|
File without changes
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (181 Bytes). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-310.pyc
ADDED
|
Binary file (4.95 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-310.pyc
ADDED
|
Binary file (1.41 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-310.pyc
ADDED
|
Binary file (1.46 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc
ADDED
|
Binary file (2.34 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc
ADDED
|
Binary file (1.2 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc
ADDED
|
Binary file (1.43 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-310.pyc
ADDED
|
Binary file (2.71 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/build_tracker.py
ADDED
|
@@ -0,0 +1,138 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import hashlib
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
from types import TracebackType
|
| 6 |
+
from typing import Dict, Generator, Optional, Type, Union
|
| 7 |
+
|
| 8 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 9 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 10 |
+
|
| 11 |
+
logger = logging.getLogger(__name__)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@contextlib.contextmanager
def update_env_context_manager(**changes: str) -> Generator[None, None, None]:
    """Temporarily apply *changes* to os.environ, restoring originals on exit.

    Variables that did not exist before are deleted again; variables that
    existed are reset to their previous values.
    """
    target = os.environ

    # Sentinel distinguishing "variable was absent" from any real value.
    absent = object()
    previous: Dict[str, Union[object, str]] = {}
    for key, value in changes.items():
        previous[key] = target.get(key, absent)
        target[key] = value

    try:
        yield
    finally:
        # Restore original values in the target.
        for key, old in previous.items():
            if old is absent:
                del target[key]
            else:
                assert isinstance(old, str)  # for mypy
                target[key] = old
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
@contextlib.contextmanager
def get_build_tracker() -> Generator["BuildTracker", None, None]:
    """Yield a BuildTracker rooted at the directory named by $PIP_BUILD_TRACKER.

    If the variable is unset, a temporary root directory is created and
    exported via PIP_BUILD_TRACKER so that pip subprocesses spawned within
    this context share the same tracker.
    """
    root = os.environ.get("PIP_BUILD_TRACKER")
    with contextlib.ExitStack() as ctx:
        if root is None:
            # No tracker root inherited from a parent pip process: create
            # one and advertise it to children through the environment for
            # the lifetime of this context.
            root = ctx.enter_context(TempDirectory(kind="build-tracker")).path
            ctx.enter_context(update_env_context_manager(PIP_BUILD_TRACKER=root))
            logger.debug("Initialized build tracking at %s", root)

        with BuildTracker(root) as tracker:
            yield tracker
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class TrackerId(str):
    """Uniquely identifying string provided to the build tracker.

    A plain ``str`` subclass with no added behavior, used as a type marker
    so tracker keys are not confused with arbitrary strings.
    """
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class BuildTracker:
    """Ensure that an sdist cannot request itself as a setup requirement.

    When an sdist is prepared, it identifies its setup requirements in the
    context of ``BuildTracker.track()``. If a requirement shows up recursively, this
    raises an exception.

    This stops fork bombs embedded in malicious packages."""

    def __init__(self, root: str) -> None:
        # Directory holding one marker file per in-progress build.
        self._root = root
        # In-memory mirror of the marker files created by this tracker.
        self._entries: Dict[TrackerId, InstallRequirement] = {}
        logger.debug("Created build tracker: %s", self._root)

    def __enter__(self) -> "BuildTracker":
        logger.debug("Entered build tracker: %s", self._root)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Always drop remaining entries, even when the body raised.
        self.cleanup()

    def _entry_path(self, key: TrackerId) -> str:
        """Return the marker-file path for ``key`` (hashed for fs safety)."""
        hashed = hashlib.sha224(key.encode()).hexdigest()
        return os.path.join(self._root, hashed)

    def add(self, req: InstallRequirement, key: TrackerId) -> None:
        """Add an InstallRequirement to build tracking.

        Raises LookupError if a marker file for ``key`` already exists,
        i.e. a build for it is already in progress (possibly in another
        pip process sharing the same tracker root).
        """

        # Get the file to write information about this requirement.
        entry_path = self._entry_path(key)

        # Try reading from the file. If it exists and can be read from, a build
        # is already in progress, so a LookupError is raised.
        try:
            with open(entry_path) as fp:
                contents = fp.read()
        except FileNotFoundError:
            pass
        else:
            message = f"{req.link} is already being built: {contents}"
            raise LookupError(message)

        # If we're here, req should really not be building already.
        assert key not in self._entries

        # Start tracking this requirement.
        with open(entry_path, "w", encoding="utf-8") as fp:
            fp.write(str(req))
        self._entries[key] = req

        logger.debug("Added %s to build tracker %r", req, self._root)

    def remove(self, req: InstallRequirement, key: TrackerId) -> None:
        """Remove an InstallRequirement from build tracking."""

        # Delete the created file and the corresponding entry.
        os.unlink(self._entry_path(key))
        del self._entries[key]

        logger.debug("Removed %s from build tracker %r", req, self._root)

    def cleanup(self) -> None:
        """Remove every entry this tracker still holds."""
        # Iterate over a snapshot, since remove() mutates self._entries.
        for key, req in list(self._entries.items()):
            self.remove(req, key)

        logger.debug("Removed build tracker: %r", self._root)

    @contextlib.contextmanager
    def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]:
        """Ensure that `key` cannot install itself as a setup requirement.

        :raises LookupError: If `key` was already provided in a parent invocation of
            the context introduced by this method."""
        tracker_id = TrackerId(key)
        self.add(req, tracker_id)
        yield
        # NOTE: deliberately not wrapped in try/finally — if the tracked
        # build raises, the entry stays until cleanup() runs via __exit__.
        self.remove(req, tracker_id)
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Metadata generation logic for source distributions.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import os
|
| 5 |
+
|
| 6 |
+
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
|
| 7 |
+
|
| 8 |
+
from pip._internal.build_env import BuildEnvironment
|
| 9 |
+
from pip._internal.exceptions import (
|
| 10 |
+
InstallationSubprocessError,
|
| 11 |
+
MetadataGenerationFailed,
|
| 12 |
+
)
|
| 13 |
+
from pip._internal.utils.subprocess import runner_with_spinner_message
|
| 14 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def generate_metadata(
    build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 517.

    :param build_env: the isolated build environment to run the hook in.
    :param backend: the PEP 517 backend hook caller.
    :param details: package description used in error reporting.
    :raises MetadataGenerationFailed: if the backend hook subprocess fails.
    Returns the generated metadata directory.
    """
    metadata_dir = TempDirectory(kind="modern-metadata", globally_managed=True).path

    with build_env:
        # BuildBackendHookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so we don't have to consider
        # the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")
        with backend.subprocess_runner(runner):
            try:
                distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    return os.path.join(metadata_dir, distinfo_dir)
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Metadata generation logic for source distributions.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import os
|
| 5 |
+
|
| 6 |
+
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
|
| 7 |
+
|
| 8 |
+
from pip._internal.build_env import BuildEnvironment
|
| 9 |
+
from pip._internal.exceptions import (
|
| 10 |
+
InstallationSubprocessError,
|
| 11 |
+
MetadataGenerationFailed,
|
| 12 |
+
)
|
| 13 |
+
from pip._internal.utils.subprocess import runner_with_spinner_message
|
| 14 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def generate_editable_metadata(
    build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 660.

    :param build_env: the isolated build environment to run the hook in.
    :param backend: the PEP 660 backend hook caller.
    :param details: package description used in error reporting.
    :raises MetadataGenerationFailed: if the backend hook subprocess fails.
    Returns the generated metadata directory.
    """
    metadata_dir = TempDirectory(kind="modern-metadata", globally_managed=True).path

    with build_env:
        # BuildBackendHookCaller implements a fallback for
        # prepare_metadata_for_build_editable, so we don't have to consider
        # the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message(
            "Preparing editable metadata (pyproject.toml)"
        )
        with backend.subprocess_runner(runner):
            try:
                distinfo_dir = backend.prepare_metadata_for_build_editable(
                    metadata_dir
                )
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    assert distinfo_dir is not None
    return os.path.join(metadata_dir, distinfo_dir)
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Metadata generation logic for legacy source distributions.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
import os
|
| 6 |
+
|
| 7 |
+
from pip._internal.build_env import BuildEnvironment
|
| 8 |
+
from pip._internal.cli.spinners import open_spinner
|
| 9 |
+
from pip._internal.exceptions import (
|
| 10 |
+
InstallationError,
|
| 11 |
+
InstallationSubprocessError,
|
| 12 |
+
MetadataGenerationFailed,
|
| 13 |
+
)
|
| 14 |
+
from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
|
| 15 |
+
from pip._internal.utils.subprocess import call_subprocess
|
| 16 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 17 |
+
|
| 18 |
+
logger = logging.getLogger(__name__)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def _find_egg_info(directory: str) -> str:
|
| 22 |
+
"""Find an .egg-info subdirectory in `directory`."""
|
| 23 |
+
filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")]
|
| 24 |
+
|
| 25 |
+
if not filenames:
|
| 26 |
+
raise InstallationError(f"No .egg-info directory found in {directory}")
|
| 27 |
+
|
| 28 |
+
if len(filenames) > 1:
|
| 29 |
+
raise InstallationError(
|
| 30 |
+
f"More than one .egg-info directory found in {directory}"
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
return os.path.join(directory, filenames[0])
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def generate_metadata(
    build_env: BuildEnvironment,
    setup_py_path: str,
    source_dir: str,
    isolated: bool,
    details: str,
) -> str:
    """Generate metadata using setup.py-based defacto mechanisms.

    :param build_env: the build environment to run ``setup.py`` in.
    :param setup_py_path: path to the package's ``setup.py``.
    :param source_dir: directory the subprocess runs in.
    :param isolated: when True, ignore the user's setuptools config.
    :param details: package description used in error reporting.
    :raises MetadataGenerationFailed: if the egg_info subprocess fails.
    Returns the generated metadata directory.
    """
    logger.debug(
        "Running setup.py (path:%s) egg_info for package %s",
        setup_py_path,
        details,
    )

    egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path

    egg_info_args = make_setuptools_egg_info_args(
        setup_py_path,
        egg_info_dir=egg_info_dir,
        no_user_config=isolated,
    )

    with build_env, open_spinner("Preparing metadata (setup.py)") as spinner:
        try:
            call_subprocess(
                egg_info_args,
                cwd=source_dir,
                command_desc="python setup.py egg_info",
                spinner=spinner,
            )
        except InstallationSubprocessError as error:
            raise MetadataGenerationFailed(package_details=details) from error

    # Return the .egg-info directory.
    return _find_egg_info(egg_info_dir)
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/wheel.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os
|
| 3 |
+
from typing import Optional
|
| 4 |
+
|
| 5 |
+
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
|
| 6 |
+
|
| 7 |
+
from pip._internal.utils.subprocess import runner_with_spinner_message
|
| 8 |
+
|
| 9 |
+
logger = logging.getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def build_wheel_pep517(
    name: str,
    backend: BuildBackendHookCaller,
    metadata_directory: str,
    tempd: str,
) -> Optional[str]:
    """Build one InstallRequirement using the PEP 517 build process.

    :param name: the project name, used for progress/error messages.
    :param backend: the PEP 517 backend hook caller.
    :param metadata_directory: previously-generated metadata directory,
        passed through to the backend so it can reuse it.
    :param tempd: destination directory the wheel is written into.
    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    try:
        logger.debug("Destination directory: %s", tempd)

        spin = runner_with_spinner_message(
            f"Building wheel for {name} (pyproject.toml)"
        )
        with backend.subprocess_runner(spin):
            wheel_name = backend.build_wheel(
                tempd,
                metadata_directory=metadata_directory,
            )
    except Exception:
        # Any failure in the backend is reported but not propagated:
        # callers treat a None return as "build failed".
        logger.error("Failed building wheel for %s", name)
        return None
    return os.path.join(tempd, wheel_name)
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_editable.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os
|
| 3 |
+
from typing import Optional
|
| 4 |
+
|
| 5 |
+
from pip._vendor.pyproject_hooks import BuildBackendHookCaller, HookMissing
|
| 6 |
+
|
| 7 |
+
from pip._internal.utils.subprocess import runner_with_spinner_message
|
| 8 |
+
|
| 9 |
+
logger = logging.getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def build_wheel_editable(
    name: str,
    backend: BuildBackendHookCaller,
    metadata_directory: str,
    tempd: str,
) -> Optional[str]:
    """Build one InstallRequirement using the PEP 660 build process.

    :param name: the project name, used for progress/error messages.
    :param backend: the PEP 660 backend hook caller.
    :param metadata_directory: previously-generated metadata directory,
        passed through to the backend so it can reuse it.
    :param tempd: destination directory the wheel is written into.
    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    try:
        logger.debug("Destination directory: %s", tempd)

        spin = runner_with_spinner_message(
            f"Building editable for {name} (pyproject.toml)"
        )
        with backend.subprocess_runner(spin):
            try:
                wheel_name = backend.build_editable(
                    tempd,
                    metadata_directory=metadata_directory,
                )
            except HookMissing as e:
                # build_editable is an optional hook; report which hook is
                # absent rather than a generic failure.
                logger.error(
                    "Cannot build editable %s because the build "
                    "backend does not have the %s hook",
                    name,
                    e,
                )
                return None
    except Exception:
        # Any other backend failure is reported but not propagated:
        # callers treat a None return as "build failed".
        logger.error("Failed building editable for %s", name)
        return None
    return os.path.join(tempd, wheel_name)
|
llava/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os.path
|
| 3 |
+
from typing import List, Optional
|
| 4 |
+
|
| 5 |
+
from pip._internal.cli.spinners import open_spinner
|
| 6 |
+
from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
|
| 7 |
+
from pip._internal.utils.subprocess import call_subprocess, format_command_args
|
| 8 |
+
|
| 9 |
+
logger = logging.getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def format_command_result(
    command_args: List[str],
    command_output: str,
) -> str:
    """Format command information for logging."""
    text = f"Command arguments: {format_command_args(command_args)}\n"

    if not command_output:
        text += "Command output: None"
    elif logger.getEffectiveLevel() > logging.DEBUG:
        # Full output is only shown at debug verbosity.
        text += "Command output: [use --verbose to show]"
    else:
        if not command_output.endswith("\n"):
            command_output += "\n"
        text += f"Command output:\n{command_output}"

    return text
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def get_legacy_build_wheel_path(
    names: List[str],
    temp_dir: str,
    name: str,
    command_args: List[str],
    command_output: str,
) -> Optional[str]:
    """Return the path to the wheel in the temporary build directory."""
    # Sort so the chosen wheel is deterministic across runs.
    ordered = sorted(names)

    if not ordered:
        warning = f"Legacy build of wheel for {name!r} created no files.\n"
        warning += format_command_result(command_args, command_output)
        logger.warning(warning)
        return None

    if len(ordered) > 1:
        # More than one file is unexpected; warn but still pick the first.
        warning = (
            f"Legacy build of wheel for {name!r} created more than one file.\n"
            f"Filenames (choosing first): {ordered}\n"
        )
        warning += format_command_result(command_args, command_output)
        logger.warning(warning)

    return os.path.join(temp_dir, ordered[0])
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def build_wheel_legacy(
    name: str,
    setup_py_path: str,
    source_dir: str,
    global_options: List[str],
    build_options: List[str],
    tempd: str,
) -> Optional[str]:
    """Build one unpacked package using the "legacy" build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    args = make_setuptools_bdist_wheel_args(
        setup_py_path,
        global_options=global_options,
        build_options=build_options,
        destination_dir=tempd,
    )

    with open_spinner(f"Building wheel for {name} (setup.py)") as spinner:
        logger.debug("Destination directory: %s", tempd)

        try:
            output = call_subprocess(
                args,
                command_desc="python setup.py bdist_wheel",
                cwd=source_dir,
                spinner=spinner,
            )
        except Exception:
            # Any subprocess failure means the build failed; report it and
            # let the caller decide how to proceed.
            spinner.finish("error")
            logger.error("Failed building wheel for %s", name)
            return None

        # The legacy build drops the wheel into tempd; locate it among the
        # files the build produced.
        return get_legacy_build_wheel_path(
            names=os.listdir(tempd),
            temp_dir=tempd,
            name=name,
            command_args=args,
            command_output=output,
        )
|
llava/lib/python3.10/site-packages/pip/_internal/operations/check.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Validation of dependencies of packages
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
from contextlib import suppress
|
| 6 |
+
from email.parser import Parser
|
| 7 |
+
from functools import reduce
|
| 8 |
+
from typing import (
|
| 9 |
+
Callable,
|
| 10 |
+
Dict,
|
| 11 |
+
FrozenSet,
|
| 12 |
+
Generator,
|
| 13 |
+
Iterable,
|
| 14 |
+
List,
|
| 15 |
+
NamedTuple,
|
| 16 |
+
Optional,
|
| 17 |
+
Set,
|
| 18 |
+
Tuple,
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
from pip._vendor.packaging.requirements import Requirement
|
| 22 |
+
from pip._vendor.packaging.tags import Tag, parse_tag
|
| 23 |
+
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
| 24 |
+
from pip._vendor.packaging.version import Version
|
| 25 |
+
|
| 26 |
+
from pip._internal.distributions import make_distribution_for_install_requirement
|
| 27 |
+
from pip._internal.metadata import get_default_environment
|
| 28 |
+
from pip._internal.metadata.base import BaseDistribution
|
| 29 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 30 |
+
|
| 31 |
+
logger = logging.getLogger(__name__)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class PackageDetails(NamedTuple):
    """Version and declared dependencies of one (installed or simulated) package."""

    # Version of the package, as parsed from its metadata.
    version: Version
    # Requirements the package declares in its metadata.
    dependencies: List[Requirement]
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
# Shorthands
# Map of canonical package name -> its version and declared dependencies.
PackageSet = Dict[NormalizedName, PackageDetails]
# A dependency that is not installed: (dependency name, the requirement).
Missing = Tuple[NormalizedName, Requirement]
# An installed dependency with the wrong version:
# (dependency name, installed version, the requirement it violates).
Conflicting = Tuple[NormalizedName, Version, Requirement]

# Problems keyed by the name of the package that declared the dependency.
MissingDict = Dict[NormalizedName, List[Missing]]
ConflictingDict = Dict[NormalizedName, List[Conflicting]]
CheckResult = Tuple[MissingDict, ConflictingDict]
# The package set that was checked, together with the problems found in it.
ConflictDetails = Tuple[PackageSet, CheckResult]
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
    """Converts a list of distributions into a PackageSet."""
    result: "PackageSet" = {}
    had_problems = False
    environment = get_default_environment()
    for dist in environment.iter_installed_distributions(local_only=False, skip=()):
        canonical = dist.canonical_name
        try:
            deps = list(dist.iter_dependencies())
            result[canonical] = PackageDetails(dist.version, deps)
        except (OSError, ValueError) as exc:
            # Don't crash on unreadable or broken metadata.
            logger.warning("Error parsing dependencies of %s: %s", canonical, exc)
            had_problems = True
    return result, had_problems
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def check_package_set(
    package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
) -> CheckResult:
    """Check if a package set is consistent

    If should_ignore is passed, it should be a callable that takes a
    package name and returns a boolean.
    """

    missing = {}
    conflicting = {}

    for package_name, package_detail in package_set.items():
        if should_ignore and should_ignore(package_name):
            continue

        # Problems found among this package's declared dependencies.
        unmet: Set[Missing] = set()
        clashes: Set[Conflicting] = set()

        for req in package_detail.dependencies:
            dep_name = canonicalize_name(req.name)

            if dep_name not in package_set:
                # The dependency is absent. An environment marker may make
                # it inapplicable here, in which case it is not reported.
                applicable = True
                if req.marker is not None:
                    applicable = req.marker.evaluate({"extra": ""})
                if applicable:
                    unmet.add((dep_name, req))
                continue

            # Installed, but possibly at a version the requirement rejects.
            installed_version = package_set[dep_name].version
            if not req.specifier.contains(installed_version, prereleases=True):
                clashes.add((dep_name, installed_version, req))

        if unmet:
            missing[package_name] = sorted(unmet, key=str)
        if clashes:
            conflicting[package_name] = sorted(clashes, key=str)

    return missing, conflicting
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
    """For checking if the dependency graph would be consistent after \
    installing given requirements
    """
    # Begin with the current environment, then overlay the candidates.
    package_set, _ = create_package_set_from_installed()
    would_be_installed = _simulate_installation_of(to_install, package_set)

    # Only warn about directly-dependent packages; anything further away in
    # the graph was already broken (or fine) before this install.
    relevant = _create_whitelist(would_be_installed, package_set)

    def _irrelevant(name: str) -> bool:
        return name not in relevant

    return package_set, check_package_set(package_set, should_ignore=_irrelevant)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def check_unsupported(
    packages: Iterable[BaseDistribution],
    supported_tags: Iterable[Tag],
) -> Generator[BaseDistribution, None, None]:
    """Yield distributions whose WHEEL metadata lists no tag compatible
    with *supported_tags*.

    Distributions without a readable WHEEL file are skipped silently.
    """
    for dist in packages:
        with suppress(FileNotFoundError):
            metadata = Parser().parsestr(dist.read_text("WHEEL"))
            # Union of every tag advertised by the wheel's "Tag" headers.
            tags: FrozenSet[Tag] = frozenset()
            for raw_tag in metadata.get_all("Tag", []):
                tags = tags.union(parse_tag(raw_tag))
            if tags.isdisjoint(supported_tags):
                yield dist
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def _simulate_installation_of(
    to_install: List[InstallRequirement], package_set: PackageSet
) -> Set[NormalizedName]:
    """Mutate *package_set* as if *to_install* were installed, and return
    the canonical names of the packages that were added or replaced.
    """
    touched: Set[NormalizedName] = set()

    # Overlay each candidate's metadata onto the package set, as a real
    # install would (assuming no errors).
    for requirement in to_install:
        abstract_dist = make_distribution_for_install_requirement(requirement)
        dist = abstract_dist.get_metadata_distribution()
        canonical = dist.canonical_name
        package_set[canonical] = PackageDetails(
            dist.version, list(dist.iter_dependencies())
        )
        touched.add(canonical)

    return touched
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def _create_whitelist(
    would_be_installed: Set[NormalizedName], package_set: PackageSet
) -> Set[NormalizedName]:
    """Return *would_be_installed* plus every package in *package_set* that
    directly depends on one of them.
    """
    affected = set(would_be_installed)

    for name in package_set:
        if name in affected:
            continue
        # Pull the package in if any of its dependencies is affected.
        if any(
            canonicalize_name(req.name) in affected
            for req in package_set[name].dependencies
        ):
            affected.add(name)

    return affected
|
llava/lib/python3.10/site-packages/pip/_internal/operations/freeze.py
ADDED
|
@@ -0,0 +1,256 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
from dataclasses import dataclass, field
|
| 5 |
+
from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set
|
| 6 |
+
|
| 7 |
+
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
| 8 |
+
from pip._vendor.packaging.version import InvalidVersion
|
| 9 |
+
|
| 10 |
+
from pip._internal.exceptions import BadCommand, InstallationError
|
| 11 |
+
from pip._internal.metadata import BaseDistribution, get_environment
|
| 12 |
+
from pip._internal.req.constructors import (
|
| 13 |
+
install_req_from_editable,
|
| 14 |
+
install_req_from_line,
|
| 15 |
+
)
|
| 16 |
+
from pip._internal.req.req_file import COMMENT_RE
|
| 17 |
+
from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference
|
| 18 |
+
|
| 19 |
+
logger = logging.getLogger(__name__)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class _EditableInfo(NamedTuple):
    """Requirement string and explanatory comments for an editable install."""

    # The requirement to emit: a VCS URL when one can be determined,
    # otherwise the project's local path.
    requirement: str
    # Zero or more "#"-prefixed lines to emit before the requirement.
    comments: List[str]
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def freeze(
    requirement: Optional[List[str]] = None,
    local_only: bool = False,
    user_only: bool = False,
    paths: Optional[List[str]] = None,
    isolated: bool = False,
    exclude_editable: bool = False,
    skip: Container[str] = (),
) -> Generator[str, None, None]:
    """Yield the lines of ``pip freeze`` output.

    When *requirement* names one or more requirements files, those files
    drive the output order: recognized option lines are echoed (once each),
    and requirements found in the files are emitted first, followed by a
    "## The following requirements were added by pip freeze:" section with
    everything else. Without *requirement*, all installed distributions are
    emitted sorted by name, excluding any whose canonical name is in *skip*.

    :param local_only: restrict to distributions local to the environment.
    :param user_only: restrict to the user site (passed through to the
        environment iterator).
    :param paths: alternative installation paths to inspect instead of the
        running environment.
    :param isolated: forwarded to requirement construction when parsing
        requirements-file lines.
    :param exclude_editable: drop editable installs from the output.
    """
    # Snapshot of installed distributions, keyed by canonical name; entries
    # are deleted as they are emitted so the trailing section only contains
    # what the requirements files did not mention.
    installations: Dict[str, FrozenRequirement] = {}

    dists = get_environment(paths).iter_installed_distributions(
        local_only=local_only,
        skip=(),
        user_only=user_only,
    )
    for dist in dists:
        req = FrozenRequirement.from_dist(dist)
        if exclude_editable and req.editable:
            continue
        installations[req.canonical_name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options: Set[str] = set()
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files: Dict[str, List[str]] = collections.defaultdict(list)
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    # Blank lines, comments and pip options are echoed
                    # verbatim (deduplicated), not parsed as requirements.
                    if (
                        not line.strip()
                        or line.strip().startswith("#")
                        or line.startswith(
                            (
                                "-r",
                                "--requirement",
                                "-f",
                                "--find-links",
                                "-i",
                                "--index-url",
                                "--pre",
                                "--trusted-host",
                                "--process-dependency-links",
                                "--extra-index-url",
                                "--use-feature",
                            )
                        )
                    ):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    # Parse the line as an (editable) requirement so its
                    # project name can be matched against installations.
                    if line.startswith("-e") or line.startswith("--editable"):
                        if line.startswith("-e"):
                            line = line[2:].strip()
                        else:
                            line = line[len("--editable") :].strip().lstrip("=")
                        line_req = install_req_from_editable(
                            line,
                            isolated=isolated,
                        )
                    else:
                        line_req = install_req_from_line(
                            COMMENT_RE.sub("", line).strip(),
                            isolated=isolated,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path,
                            line.strip(),
                        )
                        logger.info(
                            " (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    else:
                        line_req_canonical_name = canonicalize_name(line_req.name)
                        if line_req_canonical_name not in installations:
                            # either it's not installed, or it is installed
                            # but has been processed already
                            if not req_files[line_req.name]:
                                logger.warning(
                                    "Requirement file [%s] contains %s, but "
                                    "package %r is not installed",
                                    req_file_path,
                                    COMMENT_RE.sub("", line).strip(),
                                    line_req.name,
                                )
                            else:
                                req_files[line_req.name].append(req_file_path)
                        else:
                            # Emit the installed version's frozen form and
                            # remove it so it is not emitted again below.
                            yield str(installations[line_req_canonical_name]).rstrip()
                            del installations[line_req_canonical_name]
                            req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in req_files.items():
            if len(files) > 1:
                logger.warning(
                    "Requirement %s included multiple times [%s]",
                    name,
                    ", ".join(sorted(set(files))),
                )

        yield ("## The following requirements were added by pip freeze:")
    for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
        if installation.canonical_name not in skip:
            yield str(installation).rstrip()
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def _format_as_name_version(dist: BaseDistribution) -> str:
    """Render *dist* as a ``name==version`` requirement string.

    Falls back to the ``===`` arbitrary-equality operator when the installed
    version does not parse as a valid version.
    """
    try:
        version = dist.version
    except InvalidVersion:
        # Legacy (unparseable) version: pin with arbitrary equality.
        return f"{dist.raw_name}==={dist.raw_version}"
    return f"{dist.raw_name}=={version}"
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
    """
    Compute and return values (req, comments) for use in
    FrozenRequirement.from_dist().
    """
    editable_project_location = dist.editable_project_location
    # Callers only invoke this for editable installs, so a location exists.
    assert editable_project_location
    location = os.path.normcase(os.path.abspath(editable_project_location))

    # NOTE(review): imported here rather than at module top — presumably to
    # avoid an import cycle with pip._internal.vcs; confirm before moving.
    from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs

    vcs_backend = vcs.get_backend_for_dir(location)

    # No VCS checkout detected: fall back to the bare path with a comment.
    if vcs_backend is None:
        display = _format_as_name_version(dist)
        logger.debug(
            'No VCS found for editable requirement "%s" in: %r',
            display,
            location,
        )
        return _EditableInfo(
            requirement=location,
            comments=[f"# Editable install with no version control ({display})"],
        )

    vcs_name = type(vcs_backend).__name__

    try:
        req = vcs_backend.get_src_requirement(location, dist.raw_name)
    except RemoteNotFoundError:
        # Checkout exists but has no remote to point a VCS URL at.
        display = _format_as_name_version(dist)
        return _EditableInfo(
            requirement=location,
            comments=[f"# Editable {vcs_name} install with no remote ({display})"],
        )
    except RemoteNotValidError as ex:
        # Remote exists but its URL cannot be used as a requirement.
        display = _format_as_name_version(dist)
        return _EditableInfo(
            requirement=location,
            comments=[
                f"# Editable {vcs_name} install ({display}) with either a deleted "
                f"local remote or invalid URI:",
                f"# '{ex.url}'",
            ],
        )
    except BadCommand:
        # The VCS executable itself is missing from PATH.
        logger.warning(
            "cannot determine version of editable source in %s "
            "(%s command not found in path)",
            location,
            vcs_backend.name,
        )
        return _EditableInfo(requirement=location, comments=[])
    except InstallationError as exc:
        logger.warning("Error when trying to get requirement for VCS system %s", exc)
    else:
        # Success: use the VCS-derived requirement (e.g. a git+... URL).
        return _EditableInfo(requirement=req, comments=[])

    # Reached only via the InstallationError branch above.
    logger.warning("Could not determine repository location of %s", location)

    return _EditableInfo(
        requirement=location,
        comments=["## !! Could not determine repository location"],
    )
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
@dataclass(frozen=True)
class FrozenRequirement:
    """One entry of ``pip freeze`` output: a requirement plus any comments."""

    name: str
    req: str
    editable: bool
    comments: Iterable[str] = field(default_factory=tuple)

    @property
    def canonical_name(self) -> NormalizedName:
        """Normalized form of the project name, suitable for lookups."""
        return canonicalize_name(self.name)

    @classmethod
    def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
        """Build a FrozenRequirement describing an installed distribution."""
        is_editable = dist.editable
        if is_editable:
            requirement, notes = _get_editable_info(dist)
            return cls(dist.raw_name, requirement, is_editable, comments=notes)

        direct_url = dist.direct_url
        if direct_url:
            # if PEP 610 metadata is present, use it
            requirement = direct_url_as_pep440_direct_reference(
                direct_url, dist.raw_name
            )
        else:
            # name==version requirement
            requirement = _format_as_name_version(dist)
        return cls(dist.raw_name, requirement, is_editable, comments=[])

    def __str__(self) -> str:
        rendered = f"-e {self.req}" if self.editable else self.req
        lines = list(self.comments)
        lines.append(str(rendered))
        return "\n".join(lines) + "\n"
|
llava/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""For modules related to installing packages.
|
| 2 |
+
"""
|
llava/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (239 Bytes). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc
ADDED
|
Binary file (1.47 kB). View file
|
|
|