Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- evalkit_llava/lib/libatomic.so +3 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py +21 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/base.py +53 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/installed.py +29 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py +158 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py +42 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__init__.py +2 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/candidate.py +25 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/format_control.py +78 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/index.py +28 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/installation_report.py +56 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/link.py +604 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/scheme.py +25 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/search_scope.py +127 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/selection_prefs.py +53 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/target_python.py +121 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/wheel.py +118 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/__init__.py +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/__init__.py +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/build_tracker.py +138 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py +39 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py +42 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py +74 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py +102 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/check.py +181 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/freeze.py +256 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py +2 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/install/editable_legacy.py +47 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/install/wheel.py +741 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/prepare.py +732 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/req/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_llava/lib/python3.10/site-packages/pip/_internal/req/__pycache__/constructors.cpython-310.pyc +0 -0
.gitattributes
CHANGED
|
@@ -60,3 +60,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 60 |
evalkit_llava/bin/bzip2 filter=lfs diff=lfs merge=lfs -text
|
| 61 |
evalkit_llava/bin/xz filter=lfs diff=lfs merge=lfs -text
|
| 62 |
evalkit_llava/lib/libncurses.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 60 |
evalkit_llava/bin/bzip2 filter=lfs diff=lfs merge=lfs -text
|
| 61 |
evalkit_llava/bin/xz filter=lfs diff=lfs merge=lfs -text
|
| 62 |
evalkit_llava/lib/libncurses.so filter=lfs diff=lfs merge=lfs -text
|
| 63 |
+
evalkit_llava/lib/libatomic.so filter=lfs diff=lfs merge=lfs -text
|
evalkit_llava/lib/libatomic.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:2f1a92c18f01c13c9a89908fb86a7309ae5b89a882db9914114957bc4b6fed92
|
| 3 |
+
size 143648
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pip._internal.distributions.base import AbstractDistribution
|
| 2 |
+
from pip._internal.distributions.sdist import SourceDistribution
|
| 3 |
+
from pip._internal.distributions.wheel import WheelDistribution
|
| 4 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def make_distribution_for_install_requirement(
|
| 8 |
+
install_req: InstallRequirement,
|
| 9 |
+
) -> AbstractDistribution:
|
| 10 |
+
"""Returns a Distribution for the given InstallRequirement"""
|
| 11 |
+
# Editable requirements will always be source distributions. They use the
|
| 12 |
+
# legacy logic until we create a modern standard for them.
|
| 13 |
+
if install_req.editable:
|
| 14 |
+
return SourceDistribution(install_req)
|
| 15 |
+
|
| 16 |
+
# If it's a wheel, it's a WheelDistribution
|
| 17 |
+
if install_req.is_wheel:
|
| 18 |
+
return WheelDistribution(install_req)
|
| 19 |
+
|
| 20 |
+
# Otherwise, a SourceDistribution
|
| 21 |
+
return SourceDistribution(install_req)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (786 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc
ADDED
|
Binary file (2.5 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc
ADDED
|
Binary file (1.47 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc
ADDED
|
Binary file (5.3 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc
ADDED
|
Binary file (1.87 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/base.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
from typing import TYPE_CHECKING, Optional
|
| 3 |
+
|
| 4 |
+
from pip._internal.metadata.base import BaseDistribution
|
| 5 |
+
from pip._internal.req import InstallRequirement
|
| 6 |
+
|
| 7 |
+
if TYPE_CHECKING:
|
| 8 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class AbstractDistribution(metaclass=abc.ABCMeta):
|
| 12 |
+
"""A base class for handling installable artifacts.
|
| 13 |
+
|
| 14 |
+
The requirements for anything installable are as follows:
|
| 15 |
+
|
| 16 |
+
- we must be able to determine the requirement name
|
| 17 |
+
(or we can't correctly handle the non-upgrade case).
|
| 18 |
+
|
| 19 |
+
- for packages with setup requirements, we must also be able
|
| 20 |
+
to determine their requirements without installing additional
|
| 21 |
+
packages (for the same reason as run-time dependencies)
|
| 22 |
+
|
| 23 |
+
- we must be able to create a Distribution object exposing the
|
| 24 |
+
above metadata.
|
| 25 |
+
|
| 26 |
+
- if we need to do work in the build tracker, we must be able to generate a unique
|
| 27 |
+
string to identify the requirement in the build tracker.
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
def __init__(self, req: InstallRequirement) -> None:
|
| 31 |
+
super().__init__()
|
| 32 |
+
self.req = req
|
| 33 |
+
|
| 34 |
+
@abc.abstractproperty
|
| 35 |
+
def build_tracker_id(self) -> Optional[str]:
|
| 36 |
+
"""A string that uniquely identifies this requirement to the build tracker.
|
| 37 |
+
|
| 38 |
+
If None, then this dist has no work to do in the build tracker, and
|
| 39 |
+
``.prepare_distribution_metadata()`` will not be called."""
|
| 40 |
+
raise NotImplementedError()
|
| 41 |
+
|
| 42 |
+
@abc.abstractmethod
|
| 43 |
+
def get_metadata_distribution(self) -> BaseDistribution:
|
| 44 |
+
raise NotImplementedError()
|
| 45 |
+
|
| 46 |
+
@abc.abstractmethod
|
| 47 |
+
def prepare_distribution_metadata(
|
| 48 |
+
self,
|
| 49 |
+
finder: "PackageFinder",
|
| 50 |
+
build_isolation: bool,
|
| 51 |
+
check_build_deps: bool,
|
| 52 |
+
) -> None:
|
| 53 |
+
raise NotImplementedError()
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/installed.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Optional
|
| 2 |
+
|
| 3 |
+
from pip._internal.distributions.base import AbstractDistribution
|
| 4 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 5 |
+
from pip._internal.metadata import BaseDistribution
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class InstalledDistribution(AbstractDistribution):
|
| 9 |
+
"""Represents an installed package.
|
| 10 |
+
|
| 11 |
+
This does not need any preparation as the required information has already
|
| 12 |
+
been computed.
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
@property
|
| 16 |
+
def build_tracker_id(self) -> Optional[str]:
|
| 17 |
+
return None
|
| 18 |
+
|
| 19 |
+
def get_metadata_distribution(self) -> BaseDistribution:
|
| 20 |
+
assert self.req.satisfied_by is not None, "not actually installed"
|
| 21 |
+
return self.req.satisfied_by
|
| 22 |
+
|
| 23 |
+
def prepare_distribution_metadata(
|
| 24 |
+
self,
|
| 25 |
+
finder: PackageFinder,
|
| 26 |
+
build_isolation: bool,
|
| 27 |
+
check_build_deps: bool,
|
| 28 |
+
) -> None:
|
| 29 |
+
pass
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from typing import TYPE_CHECKING, Iterable, Optional, Set, Tuple
|
| 3 |
+
|
| 4 |
+
from pip._internal.build_env import BuildEnvironment
|
| 5 |
+
from pip._internal.distributions.base import AbstractDistribution
|
| 6 |
+
from pip._internal.exceptions import InstallationError
|
| 7 |
+
from pip._internal.metadata import BaseDistribution
|
| 8 |
+
from pip._internal.utils.subprocess import runner_with_spinner_message
|
| 9 |
+
|
| 10 |
+
if TYPE_CHECKING:
|
| 11 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 12 |
+
|
| 13 |
+
logger = logging.getLogger(__name__)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class SourceDistribution(AbstractDistribution):
|
| 17 |
+
"""Represents a source distribution.
|
| 18 |
+
|
| 19 |
+
The preparation step for these needs metadata for the packages to be
|
| 20 |
+
generated, either using PEP 517 or using the legacy `setup.py egg_info`.
|
| 21 |
+
"""
|
| 22 |
+
|
| 23 |
+
@property
|
| 24 |
+
def build_tracker_id(self) -> Optional[str]:
|
| 25 |
+
"""Identify this requirement uniquely by its link."""
|
| 26 |
+
assert self.req.link
|
| 27 |
+
return self.req.link.url_without_fragment
|
| 28 |
+
|
| 29 |
+
def get_metadata_distribution(self) -> BaseDistribution:
|
| 30 |
+
return self.req.get_dist()
|
| 31 |
+
|
| 32 |
+
def prepare_distribution_metadata(
|
| 33 |
+
self,
|
| 34 |
+
finder: "PackageFinder",
|
| 35 |
+
build_isolation: bool,
|
| 36 |
+
check_build_deps: bool,
|
| 37 |
+
) -> None:
|
| 38 |
+
# Load pyproject.toml, to determine whether PEP 517 is to be used
|
| 39 |
+
self.req.load_pyproject_toml()
|
| 40 |
+
|
| 41 |
+
# Set up the build isolation, if this requirement should be isolated
|
| 42 |
+
should_isolate = self.req.use_pep517 and build_isolation
|
| 43 |
+
if should_isolate:
|
| 44 |
+
# Setup an isolated environment and install the build backend static
|
| 45 |
+
# requirements in it.
|
| 46 |
+
self._prepare_build_backend(finder)
|
| 47 |
+
# Check that if the requirement is editable, it either supports PEP 660 or
|
| 48 |
+
# has a setup.py or a setup.cfg. This cannot be done earlier because we need
|
| 49 |
+
# to setup the build backend to verify it supports build_editable, nor can
|
| 50 |
+
# it be done later, because we want to avoid installing build requirements
|
| 51 |
+
# needlessly. Doing it here also works around setuptools generating
|
| 52 |
+
# UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
|
| 53 |
+
# without setup.py nor setup.cfg.
|
| 54 |
+
self.req.isolated_editable_sanity_check()
|
| 55 |
+
# Install the dynamic build requirements.
|
| 56 |
+
self._install_build_reqs(finder)
|
| 57 |
+
# Check if the current environment provides build dependencies
|
| 58 |
+
should_check_deps = self.req.use_pep517 and check_build_deps
|
| 59 |
+
if should_check_deps:
|
| 60 |
+
pyproject_requires = self.req.pyproject_requires
|
| 61 |
+
assert pyproject_requires is not None
|
| 62 |
+
conflicting, missing = self.req.build_env.check_requirements(
|
| 63 |
+
pyproject_requires
|
| 64 |
+
)
|
| 65 |
+
if conflicting:
|
| 66 |
+
self._raise_conflicts("the backend dependencies", conflicting)
|
| 67 |
+
if missing:
|
| 68 |
+
self._raise_missing_reqs(missing)
|
| 69 |
+
self.req.prepare_metadata()
|
| 70 |
+
|
| 71 |
+
def _prepare_build_backend(self, finder: "PackageFinder") -> None:
|
| 72 |
+
# Isolate in a BuildEnvironment and install the build-time
|
| 73 |
+
# requirements.
|
| 74 |
+
pyproject_requires = self.req.pyproject_requires
|
| 75 |
+
assert pyproject_requires is not None
|
| 76 |
+
|
| 77 |
+
self.req.build_env = BuildEnvironment()
|
| 78 |
+
self.req.build_env.install_requirements(
|
| 79 |
+
finder, pyproject_requires, "overlay", kind="build dependencies"
|
| 80 |
+
)
|
| 81 |
+
conflicting, missing = self.req.build_env.check_requirements(
|
| 82 |
+
self.req.requirements_to_check
|
| 83 |
+
)
|
| 84 |
+
if conflicting:
|
| 85 |
+
self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
|
| 86 |
+
if missing:
|
| 87 |
+
logger.warning(
|
| 88 |
+
"Missing build requirements in pyproject.toml for %s.",
|
| 89 |
+
self.req,
|
| 90 |
+
)
|
| 91 |
+
logger.warning(
|
| 92 |
+
"The project does not specify a build backend, and "
|
| 93 |
+
"pip cannot fall back to setuptools without %s.",
|
| 94 |
+
" and ".join(map(repr, sorted(missing))),
|
| 95 |
+
)
|
| 96 |
+
|
| 97 |
+
def _get_build_requires_wheel(self) -> Iterable[str]:
|
| 98 |
+
with self.req.build_env:
|
| 99 |
+
runner = runner_with_spinner_message("Getting requirements to build wheel")
|
| 100 |
+
backend = self.req.pep517_backend
|
| 101 |
+
assert backend is not None
|
| 102 |
+
with backend.subprocess_runner(runner):
|
| 103 |
+
return backend.get_requires_for_build_wheel()
|
| 104 |
+
|
| 105 |
+
def _get_build_requires_editable(self) -> Iterable[str]:
|
| 106 |
+
with self.req.build_env:
|
| 107 |
+
runner = runner_with_spinner_message(
|
| 108 |
+
"Getting requirements to build editable"
|
| 109 |
+
)
|
| 110 |
+
backend = self.req.pep517_backend
|
| 111 |
+
assert backend is not None
|
| 112 |
+
with backend.subprocess_runner(runner):
|
| 113 |
+
return backend.get_requires_for_build_editable()
|
| 114 |
+
|
| 115 |
+
def _install_build_reqs(self, finder: "PackageFinder") -> None:
|
| 116 |
+
# Install any extra build dependencies that the backend requests.
|
| 117 |
+
# This must be done in a second pass, as the pyproject.toml
|
| 118 |
+
# dependencies must be installed before we can call the backend.
|
| 119 |
+
if (
|
| 120 |
+
self.req.editable
|
| 121 |
+
and self.req.permit_editable_wheels
|
| 122 |
+
and self.req.supports_pyproject_editable
|
| 123 |
+
):
|
| 124 |
+
build_reqs = self._get_build_requires_editable()
|
| 125 |
+
else:
|
| 126 |
+
build_reqs = self._get_build_requires_wheel()
|
| 127 |
+
conflicting, missing = self.req.build_env.check_requirements(build_reqs)
|
| 128 |
+
if conflicting:
|
| 129 |
+
self._raise_conflicts("the backend dependencies", conflicting)
|
| 130 |
+
self.req.build_env.install_requirements(
|
| 131 |
+
finder, missing, "normal", kind="backend dependencies"
|
| 132 |
+
)
|
| 133 |
+
|
| 134 |
+
def _raise_conflicts(
|
| 135 |
+
self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
|
| 136 |
+
) -> None:
|
| 137 |
+
format_string = (
|
| 138 |
+
"Some build dependencies for {requirement} "
|
| 139 |
+
"conflict with {conflicting_with}: {description}."
|
| 140 |
+
)
|
| 141 |
+
error_message = format_string.format(
|
| 142 |
+
requirement=self.req,
|
| 143 |
+
conflicting_with=conflicting_with,
|
| 144 |
+
description=", ".join(
|
| 145 |
+
f"{installed} is incompatible with {wanted}"
|
| 146 |
+
for installed, wanted in sorted(conflicting_reqs)
|
| 147 |
+
),
|
| 148 |
+
)
|
| 149 |
+
raise InstallationError(error_message)
|
| 150 |
+
|
| 151 |
+
def _raise_missing_reqs(self, missing: Set[str]) -> None:
|
| 152 |
+
format_string = (
|
| 153 |
+
"Some build dependencies for {requirement} are missing: {missing}."
|
| 154 |
+
)
|
| 155 |
+
error_message = format_string.format(
|
| 156 |
+
requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
|
| 157 |
+
)
|
| 158 |
+
raise InstallationError(error_message)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import TYPE_CHECKING, Optional
|
| 2 |
+
|
| 3 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 4 |
+
|
| 5 |
+
from pip._internal.distributions.base import AbstractDistribution
|
| 6 |
+
from pip._internal.metadata import (
|
| 7 |
+
BaseDistribution,
|
| 8 |
+
FilesystemWheel,
|
| 9 |
+
get_wheel_distribution,
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
if TYPE_CHECKING:
|
| 13 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class WheelDistribution(AbstractDistribution):
|
| 17 |
+
"""Represents a wheel distribution.
|
| 18 |
+
|
| 19 |
+
This does not need any preparation as wheels can be directly unpacked.
|
| 20 |
+
"""
|
| 21 |
+
|
| 22 |
+
@property
|
| 23 |
+
def build_tracker_id(self) -> Optional[str]:
|
| 24 |
+
return None
|
| 25 |
+
|
| 26 |
+
def get_metadata_distribution(self) -> BaseDistribution:
|
| 27 |
+
"""Loads the metadata from the wheel file into memory and returns a
|
| 28 |
+
Distribution that uses it, not relying on the wheel file or
|
| 29 |
+
requirement.
|
| 30 |
+
"""
|
| 31 |
+
assert self.req.local_file_path, "Set as part of preparation during download"
|
| 32 |
+
assert self.req.name, "Wheels are never unnamed"
|
| 33 |
+
wheel = FilesystemWheel(self.req.local_file_path)
|
| 34 |
+
return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
|
| 35 |
+
|
| 36 |
+
def prepare_distribution_metadata(
|
| 37 |
+
self,
|
| 38 |
+
finder: "PackageFinder",
|
| 39 |
+
build_isolation: bool,
|
| 40 |
+
check_build_deps: bool,
|
| 41 |
+
) -> None:
|
| 42 |
+
pass
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__init__.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A package that contains models that represent entities.
|
| 2 |
+
"""
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc
ADDED
|
Binary file (7.39 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc
ADDED
|
Binary file (18.7 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc
ADDED
|
Binary file (918 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc
ADDED
|
Binary file (3.46 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc
ADDED
|
Binary file (1.68 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc
ADDED
|
Binary file (5.08 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/candidate.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from dataclasses import dataclass
|
| 2 |
+
|
| 3 |
+
from pip._vendor.packaging.version import Version
|
| 4 |
+
from pip._vendor.packaging.version import parse as parse_version
|
| 5 |
+
|
| 6 |
+
from pip._internal.models.link import Link
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@dataclass(frozen=True)
|
| 10 |
+
class InstallationCandidate:
|
| 11 |
+
"""Represents a potential "candidate" for installation."""
|
| 12 |
+
|
| 13 |
+
__slots__ = ["name", "version", "link"]
|
| 14 |
+
|
| 15 |
+
name: str
|
| 16 |
+
version: Version
|
| 17 |
+
link: Link
|
| 18 |
+
|
| 19 |
+
def __init__(self, name: str, version: str, link: Link) -> None:
|
| 20 |
+
object.__setattr__(self, "name", name)
|
| 21 |
+
object.__setattr__(self, "version", parse_version(version))
|
| 22 |
+
object.__setattr__(self, "link", link)
|
| 23 |
+
|
| 24 |
+
def __str__(self) -> str:
|
| 25 |
+
return f"{self.name!r} candidate (version {self.version} at {self.link})"
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/format_control.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import FrozenSet, Optional, Set
|
| 2 |
+
|
| 3 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 4 |
+
|
| 5 |
+
from pip._internal.exceptions import CommandError
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class FormatControl:
|
| 9 |
+
"""Helper for managing formats from which a package can be installed."""
|
| 10 |
+
|
| 11 |
+
__slots__ = ["no_binary", "only_binary"]
|
| 12 |
+
|
| 13 |
+
def __init__(
|
| 14 |
+
self,
|
| 15 |
+
no_binary: Optional[Set[str]] = None,
|
| 16 |
+
only_binary: Optional[Set[str]] = None,
|
| 17 |
+
) -> None:
|
| 18 |
+
if no_binary is None:
|
| 19 |
+
no_binary = set()
|
| 20 |
+
if only_binary is None:
|
| 21 |
+
only_binary = set()
|
| 22 |
+
|
| 23 |
+
self.no_binary = no_binary
|
| 24 |
+
self.only_binary = only_binary
|
| 25 |
+
|
| 26 |
+
def __eq__(self, other: object) -> bool:
|
| 27 |
+
if not isinstance(other, self.__class__):
|
| 28 |
+
return NotImplemented
|
| 29 |
+
|
| 30 |
+
if self.__slots__ != other.__slots__:
|
| 31 |
+
return False
|
| 32 |
+
|
| 33 |
+
return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)
|
| 34 |
+
|
| 35 |
+
def __repr__(self) -> str:
|
| 36 |
+
return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})"
|
| 37 |
+
|
| 38 |
+
@staticmethod
|
| 39 |
+
def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
|
| 40 |
+
if value.startswith("-"):
|
| 41 |
+
raise CommandError(
|
| 42 |
+
"--no-binary / --only-binary option requires 1 argument."
|
| 43 |
+
)
|
| 44 |
+
new = value.split(",")
|
| 45 |
+
while ":all:" in new:
|
| 46 |
+
other.clear()
|
| 47 |
+
target.clear()
|
| 48 |
+
target.add(":all:")
|
| 49 |
+
del new[: new.index(":all:") + 1]
|
| 50 |
+
# Without a none, we want to discard everything as :all: covers it
|
| 51 |
+
if ":none:" not in new:
|
| 52 |
+
return
|
| 53 |
+
for name in new:
|
| 54 |
+
if name == ":none:":
|
| 55 |
+
target.clear()
|
| 56 |
+
continue
|
| 57 |
+
name = canonicalize_name(name)
|
| 58 |
+
other.discard(name)
|
| 59 |
+
target.add(name)
|
| 60 |
+
|
| 61 |
+
def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
|
| 62 |
+
result = {"binary", "source"}
|
| 63 |
+
if canonical_name in self.only_binary:
|
| 64 |
+
result.discard("source")
|
| 65 |
+
elif canonical_name in self.no_binary:
|
| 66 |
+
result.discard("binary")
|
| 67 |
+
elif ":all:" in self.only_binary:
|
| 68 |
+
result.discard("source")
|
| 69 |
+
elif ":all:" in self.no_binary:
|
| 70 |
+
result.discard("binary")
|
| 71 |
+
return frozenset(result)
|
| 72 |
+
|
| 73 |
+
def disallow_binaries(self) -> None:
|
| 74 |
+
self.handle_mutual_excludes(
|
| 75 |
+
":all:",
|
| 76 |
+
self.no_binary,
|
| 77 |
+
self.only_binary,
|
| 78 |
+
)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/index.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import urllib.parse
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
class PackageIndex:
|
| 5 |
+
"""Represents a Package Index and provides easier access to endpoints"""
|
| 6 |
+
|
| 7 |
+
__slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]
|
| 8 |
+
|
| 9 |
+
def __init__(self, url: str, file_storage_domain: str) -> None:
|
| 10 |
+
super().__init__()
|
| 11 |
+
self.url = url
|
| 12 |
+
self.netloc = urllib.parse.urlsplit(url).netloc
|
| 13 |
+
self.simple_url = self._url_for_path("simple")
|
| 14 |
+
self.pypi_url = self._url_for_path("pypi")
|
| 15 |
+
|
| 16 |
+
# This is part of a temporary hack used to block installs of PyPI
|
| 17 |
+
# packages which depend on external urls only necessary until PyPI can
|
| 18 |
+
# block such packages themselves
|
| 19 |
+
self.file_storage_domain = file_storage_domain
|
| 20 |
+
|
| 21 |
+
def _url_for_path(self, path: str) -> str:
|
| 22 |
+
return urllib.parse.urljoin(self.url, path)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
|
| 26 |
+
TestPyPI = PackageIndex(
|
| 27 |
+
"https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
|
| 28 |
+
)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/installation_report.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any, Dict, Sequence
|
| 2 |
+
|
| 3 |
+
from pip._vendor.packaging.markers import default_environment
|
| 4 |
+
|
| 5 |
+
from pip import __version__
|
| 6 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class InstallationReport:
    """Builds the JSON-serializable installation report (version "1")
    describing what pip resolved for installation."""

    def __init__(self, install_requirements: Sequence[InstallRequirement]):
        # The resolved requirements this report will describe.
        self._install_requirements = install_requirements

    @classmethod
    def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]:
        """Serialize one resolved requirement into its report entry.

        Requires ``ireq.download_info`` to be populated (PEP 610 data);
        hashes inside it may be absent for old wheel-cache entries.
        """
        assert ireq.download_info, f"No download_info for {ireq}"
        link = ireq.link
        entry: Dict[str, Any] = {
            # PEP 610 JSON describing where the artifact came from.
            "download_info": ireq.download_info.to_dict(),
            # True for direct URL references (including editables); False for
            # candidates found via a PEP 503 index or --find-links.
            "is_direct": ireq.is_direct,
            # True when a yanked file was still selected (PEP 592).
            "is_yanked": link.is_yanked if link else False,
            # True when the user asked for this requirement explicitly
            # (top-level), per PEP 376 "requested".
            "requested": ireq.user_supplied,
            # PEP 566 JSON-compatible metadata encoding.
            # https://www.python.org/dev/peps/pep-0566/#json-compatible-metadata
            "metadata": ireq.get_dist().metadata_dict,
        }
        if ireq.user_supplied and ireq.extras:
            # Top-level requirements also record the extras the user asked for.
            entry["requested_extras"] = sorted(ireq.extras)
        return entry

    def to_dict(self) -> Dict[str, Any]:
        """Return the complete report as a JSON-compatible dict."""
        install_entries = [
            self._install_req_to_dict(ireq) for ireq in self._install_requirements
        ]
        return {
            "version": "1",
            "pip_version": __version__,
            "install": install_entries,
            # https://peps.python.org/pep-0508/#environment-markers
            # TODO: currently, the resolver uses the default environment to evaluate
            # environment markers, so that is what we report here. In the future, it
            # should also take into account options such as --python-version or
            # --platform, perhaps under the form of an environment_override field?
            # https://github.com/pypa/pip/issues/11198
            "environment": default_environment(),
        }
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/link.py
ADDED
|
@@ -0,0 +1,604 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
import itertools
import logging
import os
import posixpath
import re
import urllib.parse
import urllib.request
from dataclasses import dataclass
from typing import (
    TYPE_CHECKING,
    Any,
    Dict,
    List,
    Mapping,
    NamedTuple,
    Optional,
    Tuple,
    Union,
)

from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filetypes import WHEEL_EXTENSION
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.misc import (
    pairwise,
    redact_auth_from_url,
    split_auth_from_netloc,
    splitext,
)
from pip._internal.utils.urls import path_to_url, url_to_path
|
| 31 |
+
|
| 32 |
+
if TYPE_CHECKING:
|
| 33 |
+
from pip._internal.index.collector import IndexContent
|
| 34 |
+
|
| 35 |
+
logger = logging.getLogger(__name__)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
# Order matters, earlier hashes have a precedence over later hashes for what
# we will pick to use (strongest algorithms first).
_SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
@dataclass(frozen=True)
class LinkHash:
    """Links to content may have embedded hash values. This class parses those.

    `name` must be any member of `_SUPPORTED_HASHES`.

    This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to
    be JSON-serializable to conform to PEP 610, this class contains the logic for
    parsing a hash name and value for correctness, and then checking whether that hash
    conforms to a schema with `.is_hash_allowed()`."""

    # Hash algorithm name, e.g. "sha256" (asserted in __post_init__).
    name: str
    # Digest string exactly as found in the URL fragment; NOT validated as hex
    # here (see the regex comment below).
    value: str

    # Matches "#<algo>=<value>" or "&<algo>=<value>" for any supported algorithm.
    _hash_url_fragment_re = re.compile(
        # NB: we do not validate that the second group (.*) is a valid hex
        # digest. Instead, we simply keep that string in this class, and then check it
        # against Hashes when hash-checking is needed. This is easier to debug than
        # proactively discarding an invalid hex digest, as we handle incorrect hashes
        # and malformed hashes in the same place.
        r"[#&]({choices})=([^&]*)".format(
            choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
        ),
    )

    def __post_init__(self) -> None:
        # Enforce the class invariant stated in the docstring.
        assert self.name in _SUPPORTED_HASHES

    @classmethod
    @functools.lru_cache(maxsize=None)
    def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
        """Search a string for a checksum algorithm name and encoded output value."""
        # Memoized: index pages repeat URLs, so parsing is cached per-URL.
        match = cls._hash_url_fragment_re.search(url)
        if match is None:
            return None
        name, value = match.groups()
        return cls(name=name, value=value)

    def as_dict(self) -> Dict[str, str]:
        """Return this hash as a single-entry ``{name: value}`` mapping."""
        return {self.name: self.value}

    def as_hashes(self) -> Hashes:
        """Return a Hashes instance which checks only for the current hash."""
        return Hashes({self.name: [self.value]})

    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
        """
        Return True if the current hash is allowed by `hashes`.

        A None `hashes` (no requirement) is treated as "not allowed".
        """
        if hashes is None:
            return False
        return hashes.is_hash_allowed(self.name, hex_digest=self.value)
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
@dataclass(frozen=True)
class MetadataFile:
    """Information about a core metadata file associated with a distribution."""

    # Mapping of hash algorithm name -> digest for the metadata file, or None
    # when the index advertised the file without any (supported) hashes.
    hashes: Optional[Dict[str, str]]

    def __post_init__(self) -> None:
        # Callers are expected to pre-filter the mapping with
        # supported_hashes(); this just enforces that contract.
        if self.hashes is not None:
            assert all(name in _SUPPORTED_HASHES for name in self.hashes)
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
    """Filter a hash mapping down to the algorithms pip supports.

    Returns None when the input is None, or when filtering leaves no
    supported entries at all.
    """
    if hashes is None:
        return None
    filtered = {
        name: digest for name, digest in hashes.items() if name in _SUPPORTED_HASHES
    }
    return filtered or None
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def _clean_url_path_part(part: str) -> str:
|
| 120 |
+
"""
|
| 121 |
+
Clean a "part" of a URL path (i.e. after splitting on "@" characters).
|
| 122 |
+
"""
|
| 123 |
+
# We unquote prior to quoting to make sure nothing is double quoted.
|
| 124 |
+
return urllib.parse.quote(urllib.parse.unquote(part))
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
def _clean_file_url_path(part: str) -> str:
|
| 128 |
+
"""
|
| 129 |
+
Clean the first part of a URL path that corresponds to a local
|
| 130 |
+
filesystem path (i.e. the first part after splitting on "@" characters).
|
| 131 |
+
"""
|
| 132 |
+
# We unquote prior to quoting to make sure nothing is double quoted.
|
| 133 |
+
# Also, on Windows the path part might contain a drive letter which
|
| 134 |
+
# should not be quoted. On Linux where drive letters do not
|
| 135 |
+
# exist, the colon should be quoted. We rely on urllib.request
|
| 136 |
+
# to do the right thing here.
|
| 137 |
+
return urllib.request.pathname2url(urllib.request.url2pathname(part))
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
# Characters that must survive path cleaning unchanged: "@" (used as a
# revision separator in VCS URLs) and a percent-encoded "/" ("%2F"/"%2f"),
# which must not be unquoted into a real path separator.
_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def _clean_url_path(path: str, is_local_path: bool) -> str:
    """
    Clean the path portion of a URL, re-quoting each segment while leaving
    reserved markers ("@" and percent-encoded "/") intact.
    """
    if is_local_path:
        clean_func = _clean_file_url_path
    else:
        clean_func = _clean_url_path_part

    # Split on the reserved characters prior to cleaning so that
    # revision strings in VCS URLs are properly preserved.
    # Because the regex has a capturing group, re.split returns
    # [text, reserved, text, reserved, ..., text].
    parts = _reserved_chars_re.split(path)

    cleaned_parts = []
    # NOTE(review): `pairwise` here comes from pip._internal.utils.misc and
    # appears to consume the iterable two items at a time (text chunk plus its
    # following reserved separator); the trailing "" pads the final chunk.
    # Confirm it is NOT itertools.pairwise (overlapping) semantics.
    for to_clean, reserved in pairwise(itertools.chain(parts, [""])):
        cleaned_parts.append(clean_func(to_clean))
        # Normalize %xx escapes (e.g. %2f -> %2F)
        cleaned_parts.append(reserved.upper())

    return "".join(cleaned_parts)
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
def _ensure_quoted_url(url: str) -> str:
    """
    Make sure a link is fully quoted.
    For example, if ' ' occurs in the URL, it will be replaced with "%20",
    and without double-quoting other characters.
    """
    # Split into `scheme://netloc/path?query#fragment` components.
    split = urllib.parse.urlsplit(url)
    # An empty netloc means the URL refers to a local filesystem path.
    cleaned_path = _clean_url_path(split.path, is_local_path=not split.netloc)
    return urllib.parse.urlunsplit(split._replace(path=cleaned_path))
|
| 179 |
+
|
| 180 |
+
|
| 181 |
+
def _absolute_link_url(base_url: str, url: str) -> str:
|
| 182 |
+
"""
|
| 183 |
+
A faster implementation of urllib.parse.urljoin with a shortcut
|
| 184 |
+
for absolute http/https URLs.
|
| 185 |
+
"""
|
| 186 |
+
if url.startswith(("https://", "http://")):
|
| 187 |
+
return url
|
| 188 |
+
else:
|
| 189 |
+
return urllib.parse.urljoin(base_url, url)
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
@functools.total_ordering
class Link:
    """Represents a parsed link from a Package Index's simple URL"""

    # __slots__ keeps per-instance memory low; many Link objects are created
    # while parsing index pages.
    __slots__ = [
        "_parsed_url",
        "_url",
        "_path",
        "_hashes",
        "comes_from",
        "requires_python",
        "yanked_reason",
        "metadata_file_data",
        "cache_link_parsing",
        "egg_fragment",
    ]

    def __init__(
        self,
        url: str,
        comes_from: Optional[Union[str, "IndexContent"]] = None,
        requires_python: Optional[str] = None,
        yanked_reason: Optional[str] = None,
        metadata_file_data: Optional[MetadataFile] = None,
        cache_link_parsing: bool = True,
        hashes: Optional[Mapping[str, str]] = None,
    ) -> None:
        """
        :param url: url of the resource pointed to (href of the link)
        :param comes_from: instance of IndexContent where the link was found,
            or string.
        :param requires_python: String containing the `Requires-Python`
            metadata field, specified in PEP 345. This may be specified by
            a data-requires-python attribute in the HTML link tag, as
            described in PEP 503.
        :param yanked_reason: the reason the file has been yanked, if the
            file has been yanked, or None if the file hasn't been yanked.
            This is the value of the "data-yanked" attribute, if present, in
            a simple repository HTML link. If the file has been yanked but
            no reason was provided, this should be the empty string. See
            PEP 592 for more information and the specification.
        :param metadata_file_data: the metadata attached to the file, or None if
            no such metadata is provided. This argument, if not None, indicates
            that a separate metadata file exists, and also optionally supplies
            hashes for that file.
        :param cache_link_parsing: A flag that is used elsewhere to determine
            whether resources retrieved from this link should be cached. PyPI
            URLs should generally have this set to False, for example.
        :param hashes: A mapping of hash names to digests to allow us to
            determine the validity of a download.
        """

        # The comes_from, requires_python, and metadata_file_data arguments are
        # only used by classmethods of this class, and are not used in client
        # code directly.

        # url can be a UNC windows share
        if url.startswith("\\\\"):
            url = path_to_url(url)

        self._parsed_url = urllib.parse.urlsplit(url)
        # Store the url as a private attribute to prevent accidentally
        # trying to set a new value.
        self._url = url
        # The .path property is hot, so calculate its value ahead of time.
        self._path = urllib.parse.unquote(self._parsed_url.path)

        # Explicit hashes are merged with any hash embedded in the URL
        # fragment; the fragment hash wins on a name collision.
        link_hash = LinkHash.find_hash_url_fragment(url)
        hashes_from_link = {} if link_hash is None else link_hash.as_dict()
        if hashes is None:
            self._hashes = hashes_from_link
        else:
            self._hashes = {**hashes, **hashes_from_link}

        self.comes_from = comes_from
        # Normalize an empty requires-python string to None.
        self.requires_python = requires_python if requires_python else None
        self.yanked_reason = yanked_reason
        self.metadata_file_data = metadata_file_data

        self.cache_link_parsing = cache_link_parsing
        self.egg_fragment = self._egg_fragment()

    @classmethod
    def from_json(
        cls,
        file_data: Dict[str, Any],
        page_url: str,
    ) -> Optional["Link"]:
        """
        Convert an pypi json document from a simple repository page into a Link.

        Returns None when the file entry carries no "url" key.
        """
        file_url = file_data.get("url")
        if file_url is None:
            return None

        url = _ensure_quoted_url(_absolute_link_url(page_url, file_url))
        pyrequire = file_data.get("requires-python")
        yanked_reason = file_data.get("yanked")
        hashes = file_data.get("hashes", {})

        # PEP 714: Indexes must use the name core-metadata, but
        # clients should support the old name as a fallback for compatibility.
        metadata_info = file_data.get("core-metadata")
        if metadata_info is None:
            metadata_info = file_data.get("dist-info-metadata")

        # The metadata info value may be a boolean, or a dict of hashes.
        if isinstance(metadata_info, dict):
            # The file exists, and hashes have been supplied
            metadata_file_data = MetadataFile(supported_hashes(metadata_info))
        elif metadata_info:
            # The file exists, but there are no hashes
            metadata_file_data = MetadataFile(None)
        else:
            # False or not present: the file does not exist
            metadata_file_data = None

        # The Link.yanked_reason expects an empty string instead of a boolean.
        if yanked_reason and not isinstance(yanked_reason, str):
            yanked_reason = ""
        # The Link.yanked_reason expects None instead of False.
        elif not yanked_reason:
            yanked_reason = None

        return cls(
            url,
            comes_from=page_url,
            requires_python=pyrequire,
            yanked_reason=yanked_reason,
            hashes=hashes,
            metadata_file_data=metadata_file_data,
        )

    @classmethod
    def from_element(
        cls,
        anchor_attribs: Dict[str, Optional[str]],
        page_url: str,
        base_url: str,
    ) -> Optional["Link"]:
        """
        Convert an anchor element's attributes in a simple repository page to a Link.

        Returns None when the anchor has no (non-empty) href.
        """
        href = anchor_attribs.get("href")
        if not href:
            return None

        url = _ensure_quoted_url(_absolute_link_url(base_url, href))
        pyrequire = anchor_attribs.get("data-requires-python")
        yanked_reason = anchor_attribs.get("data-yanked")

        # PEP 714: Indexes must use the name data-core-metadata, but
        # clients should support the old name as a fallback for compatibility.
        metadata_info = anchor_attribs.get("data-core-metadata")
        if metadata_info is None:
            metadata_info = anchor_attribs.get("data-dist-info-metadata")
        # The metadata info value may be the string "true", or a string of
        # the form "hashname=hashval"
        if metadata_info == "true":
            # The file exists, but there are no hashes
            metadata_file_data = MetadataFile(None)
        elif metadata_info is None:
            # The file does not exist
            metadata_file_data = None
        else:
            # The file exists, and hashes have been supplied
            hashname, sep, hashval = metadata_info.partition("=")
            if sep == "=":
                metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
            else:
                # Error - data is wrong. Treat as no hashes supplied.
                logger.debug(
                    "Index returned invalid data-dist-info-metadata value: %s",
                    metadata_info,
                )
                metadata_file_data = MetadataFile(None)

        return cls(
            url,
            comes_from=page_url,
            requires_python=pyrequire,
            yanked_reason=yanked_reason,
            metadata_file_data=metadata_file_data,
        )

    def __str__(self) -> str:
        # Human-readable form with credentials redacted and provenance noted.
        if self.requires_python:
            rp = f" (requires-python:{self.requires_python})"
        else:
            rp = ""
        if self.comes_from:
            return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
        else:
            return redact_auth_from_url(str(self._url))

    def __repr__(self) -> str:
        return f"<Link {self}>"

    # Equality, hashing, and ordering are all defined on the raw URL string;
    # total_ordering fills in the remaining comparison operators from __lt__.
    def __hash__(self) -> int:
        return hash(self.url)

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Link):
            return NotImplemented
        return self.url == other.url

    def __lt__(self, other: Any) -> bool:
        if not isinstance(other, Link):
            return NotImplemented
        return self.url < other.url

    @property
    def url(self) -> str:
        """The original URL this link was constructed from (may embed auth)."""
        return self._url

    @property
    def filename(self) -> str:
        """The percent-decoded basename of the URL path.

        Falls back to the auth-stripped netloc when the path has no basename.
        """
        path = self.path.rstrip("/")
        name = posixpath.basename(path)
        if not name:
            # Make sure we don't leak auth information if the netloc
            # includes a username and password.
            netloc, user_pass = split_auth_from_netloc(self.netloc)
            return netloc

        name = urllib.parse.unquote(name)
        assert name, f"URL {self._url!r} produced no filename"
        return name

    @property
    def file_path(self) -> str:
        """The local filesystem path corresponding to this (file:) URL."""
        return url_to_path(self.url)

    @property
    def scheme(self) -> str:
        """The URL scheme, e.g. "https" or "file"."""
        return self._parsed_url.scheme

    @property
    def netloc(self) -> str:
        """
        This can contain auth information.
        """
        return self._parsed_url.netloc

    @property
    def path(self) -> str:
        # Precomputed in __init__: the unquoted form of the parsed URL path.
        return self._path

    def splitext(self) -> Tuple[str, str]:
        """Split the path's basename into ``(root, extension)``."""
        return splitext(posixpath.basename(self.path.rstrip("/")))

    @property
    def ext(self) -> str:
        """The file extension of the URL path (including the leading dot)."""
        return self.splitext()[1]

    @property
    def url_without_fragment(self) -> str:
        """The URL with the ``#fragment`` part removed."""
        scheme, netloc, path, query, fragment = self._parsed_url
        return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))

    _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")

    # Per PEP 508.
    _project_name_re = re.compile(
        r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
    )

    def _egg_fragment(self) -> Optional[str]:
        # Extract the egg= fragment value; emit a deprecation warning for
        # values that are not valid PEP 508 project names (still returned).
        match = self._egg_fragment_re.search(self._url)
        if not match:
            return None

        # An egg fragment looks like a PEP 508 project name, along with
        # an optional extras specifier. Anything else is invalid.
        project_name = match.group(1)
        if not self._project_name_re.match(project_name):
            deprecated(
                reason=f"{self} contains an egg fragment with a non-PEP 508 name.",
                replacement="to use the req @ url syntax, and remove the egg fragment",
                gone_in="25.1",
                issue=13157,
            )

        return project_name

    _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")

    @property
    def subdirectory_fragment(self) -> Optional[str]:
        """The ``subdirectory=`` fragment value, or None if absent."""
        match = self._subdirectory_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    def metadata_link(self) -> Optional["Link"]:
        """Return a link to the associated core metadata file (if any)."""
        if self.metadata_file_data is None:
            return None
        metadata_url = f"{self.url_without_fragment}.metadata"
        if self.metadata_file_data.hashes is None:
            return Link(metadata_url)
        return Link(metadata_url, hashes=self.metadata_file_data.hashes)

    def as_hashes(self) -> Hashes:
        """Return every known hash for this link as a Hashes instance."""
        return Hashes({k: [v] for k, v in self._hashes.items()})

    @property
    def hash(self) -> Optional[str]:
        """The first known hash digest (insertion order), or None."""
        return next(iter(self._hashes.values()), None)

    @property
    def hash_name(self) -> Optional[str]:
        """The algorithm name paired with ``.hash``, or None."""
        return next(iter(self._hashes), None)

    @property
    def show_url(self) -> str:
        """The basename of the URL with query and fragment stripped."""
        return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])

    @property
    def is_file(self) -> bool:
        """True for file: URLs."""
        return self.scheme == "file"

    def is_existing_dir(self) -> bool:
        """True if this is a file: URL pointing at an existing directory."""
        return self.is_file and os.path.isdir(self.file_path)

    @property
    def is_wheel(self) -> bool:
        """True when the URL path ends in the wheel extension."""
        return self.ext == WHEEL_EXTENSION

    @property
    def is_vcs(self) -> bool:
        # Imported here rather than at module level — presumably to avoid an
        # import cycle with pip._internal.vcs (TODO confirm).
        from pip._internal.vcs import vcs

        return self.scheme in vcs.all_schemes

    @property
    def is_yanked(self) -> bool:
        # PEP 592: a yanked file carries a reason string, possibly empty.
        return self.yanked_reason is not None

    @property
    def has_hash(self) -> bool:
        """True when at least one hash is known for this link."""
        return bool(self._hashes)

    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
        """
        Return True if the link has a hash and it is allowed by `hashes`.
        """
        if hashes is None:
            return False
        return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
class _CleanResult(NamedTuple):
    """Convert link for equivalency check.

    This is used in the resolver to check whether two URL-specified requirements
    likely point to the same distribution and can be considered equivalent. This
    equivalency logic avoids comparing URLs literally, which can be too strict
    (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users.

    Currently this does three things:

    1. Drop the basic auth part. This is technically wrong since a server can
       serve different content based on auth, but if it does that, it is even
       impossible to guarantee two URLs without auth are equivalent, since
       the user can input different auth information when prompted. So the
       practical solution is to assume the auth doesn't affect the response.
    2. Parse the query to avoid the ordering issue. Note that ordering under the
       same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are
       still considered different.
    3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
       hash values, since it should have no impact the downloaded content. Note
       that this drops the "egg=" part historically used to denote the requested
       project (and extras), which is wrong in the strictest sense, but too many
       people are supplying it inconsistently to cause superfluous resolution
       conflicts, so we choose to also ignore them.
    """

    # URL with auth, query, and fragment stripped out.
    parsed: urllib.parse.SplitResult
    # Parsed query string (key -> list of values), order-insensitive by key.
    query: Dict[str, List[str]]
    # First subdirectory= fragment value, or "" when absent.
    subdirectory: str
    # First fragment hash value per supported algorithm.
    hashes: Dict[str, str]
|
| 574 |
+
|
| 575 |
+
|
| 576 |
+
def _clean_link(link: Link) -> _CleanResult:
    """Normalize *link* into a _CleanResult for equivalency comparison."""
    parsed = link._parsed_url
    # Drop any "user:password@" prefix from the netloc.
    netloc = parsed.netloc.rsplit("@", 1)[-1]
    # According to RFC 8089, an empty host in file: means localhost.
    if parsed.scheme == "file" and not netloc:
        netloc = "localhost"
    fragment = urllib.parse.parse_qs(parsed.fragment)
    if "egg" in fragment:
        logger.debug("Ignoring egg= fragment in %s", link)
    try:
        # If there are multiple subdirectory values, use the first one.
        # This matches the behavior of Link.subdirectory_fragment.
        subdirectory = fragment["subdirectory"][0]
    except (IndexError, KeyError):
        subdirectory = ""
    # If there are multiple hash values under the same algorithm, use the
    # first one. This matches the behavior of Link.hash_value.
    hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment}
    return _CleanResult(
        parsed=parsed._replace(netloc=netloc, query="", fragment=""),
        query=urllib.parse.parse_qs(parsed.query),
        subdirectory=subdirectory,
        hashes=hashes,
    )
|
| 600 |
+
|
| 601 |
+
|
| 602 |
+
@functools.lru_cache(maxsize=None)
def links_equivalent(link1: Link, link2: Link) -> bool:
    """Return True when two links likely point at the same distribution.

    See _CleanResult for what is ignored during the comparison. Results are
    memoized per (link1, link2) pair; Link hashes/compares by its URL string.
    """
    return _clean_link(link1) == _clean_link(link2)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/scheme.py
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
For types associated with installation schemes.
|
| 3 |
+
|
| 4 |
+
For a general overview of available schemes and their context, see
|
| 5 |
+
https://docs.python.org/3/install/index.html#alternate-installation.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
from dataclasses import dataclass
|
| 9 |
+
|
| 10 |
+
# Names of the path entries that make up an installation scheme, mirroring
# the sysconfig-style scheme keys.
SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]


@dataclass(frozen=True)
class Scheme:
    """A Scheme holds paths which are used as the base directories for
    artifacts associated with a Python package.
    """

    # Reuse SCHEME_KEYS so the slots always match the declared fields.
    __slots__ = SCHEME_KEYS

    # Platform-dependent library directory.
    platlib: str
    # Pure-Python library directory.
    purelib: str
    # C header files directory.
    headers: str
    # Executable scripts directory.
    scripts: str
    # Data files directory.
    data: str
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/search_scope.py
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import itertools
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
import posixpath
|
| 5 |
+
import urllib.parse
|
| 6 |
+
from dataclasses import dataclass
|
| 7 |
+
from typing import List
|
| 8 |
+
|
| 9 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 10 |
+
|
| 11 |
+
from pip._internal.models.index import PyPI
|
| 12 |
+
from pip._internal.utils.compat import has_tls
|
| 13 |
+
from pip._internal.utils.misc import normalize_path, redact_auth_from_url
|
| 14 |
+
|
| 15 |
+
logger = logging.getLogger(__name__)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
@dataclass(frozen=True)
class SearchScope:
    """
    Encapsulates the locations that pip is configured to search.
    """

    __slots__ = ["find_links", "index_urls", "no_index"]

    find_links: List[str]
    index_urls: List[str]
    no_index: bool

    @classmethod
    def create(
        cls,
        find_links: List[str],
        index_urls: List[str],
        no_index: bool,
    ) -> "SearchScope":
        """
        Create a SearchScope object after normalizing the `find_links`.
        """

        def _expand(entry: str) -> str:
            # An entry starting with "~" may be a local file relative to a
            # home directory.  Keep the expansion only when it resolves to
            # something that actually exists; anything else passes through
            # untouched.  Deliberately conservative — it might be fine to
            # blindly normalize everything starting with "~".
            if not entry.startswith("~"):
                return entry
            expanded = normalize_path(entry)
            return expanded if os.path.exists(expanded) else entry

        built_find_links = [_expand(entry) for entry in find_links]

        # Without the ssl module, any https location is unusable; emit a
        # single warning if one is configured.
        if not has_tls() and any(
            urllib.parse.urlparse(location).scheme == "https"
            for location in itertools.chain(index_urls, built_find_links)
        ):
            logger.warning(
                "pip is configured with locations that require "
                "TLS/SSL, however the ssl module in Python is not "
                "available."
            )

        return cls(
            find_links=built_find_links,
            index_urls=index_urls,
            no_index=no_index,
        )

    def get_formatted_locations(self) -> str:
        """Return a human-readable, auth-redacted summary of the configured
        search locations, one "Looking in ..." line per category."""
        output_lines: List[str] = []

        if self.index_urls and self.index_urls != [PyPI.simple_url]:
            redacted_index_urls = []
            for url in self.index_urls:
                safe_url = redact_auth_from_url(url)

                # A URL missing both scheme and netloc is almost certainly
                # malformed.  Python's URL parsing is lenient (see
                # bpo-20271, bpo-23505), so this is a crude heuristic
                # rather than real validation.
                parts = urllib.parse.urlsplit(safe_url)
                if not parts.scheme and not parts.netloc:
                    logger.warning(
                        'The index url "%s" seems invalid, please provide a scheme.',
                        safe_url,
                    )

                redacted_index_urls.append(safe_url)

            joined_indexes = ", ".join(redacted_index_urls)
            output_lines.append(f"Looking in indexes: {joined_indexes}")

        if self.find_links:
            joined_links = ", ".join(
                redact_auth_from_url(url) for url in self.find_links
            )
            output_lines.append(f"Looking in links: {joined_links}")

        return "\n".join(output_lines)

    def get_index_urls_locations(self, project_name: str) -> List[str]:
        """Returns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        use this url_name to produce all locations
        """
        # The quoted, canonicalized name is identical for every index, so
        # compute it once up front.
        quoted_name = urllib.parse.quote(canonicalize_name(project_name))

        def _project_url(index_url: str) -> str:
            location = posixpath.join(index_url, quoted_name)
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash.  Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's
            # behavior.
            return location if location.endswith("/") else location + "/"

        return [_project_url(url) for url in self.index_urls]
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/selection_prefs.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Optional
|
| 2 |
+
|
| 3 |
+
from pip._internal.models.format_control import FormatControl
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
# TODO: This needs Python 3.10's improved slots support for dataclasses
|
| 7 |
+
# to be converted into a dataclass.
|
| 8 |
+
class SelectionPreferences:
    """
    Encapsulates the candidate selection preferences for downloading
    and installing files.
    """

    __slots__ = [
        "allow_yanked",
        "allow_all_prereleases",
        "format_control",
        "prefer_binary",
        "ignore_requires_python",
    ]

    # ``allow_yanked`` deliberately has no default: every call site must
    # decide explicitly whether yanked releases are acceptable, keeping
    # that decision visible to readers of the calling code.
    def __init__(
        self,
        allow_yanked: bool,
        allow_all_prereleases: bool = False,
        format_control: Optional[FormatControl] = None,
        prefer_binary: bool = False,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """Create a SelectionPreferences object.

        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param prefer_binary: Whether to prefer an old, but valid, binary
            dist over a new source dist.
        :param ignore_requires_python: Whether to ignore incompatible
            "Requires-Python" values in links. Defaults to False.
        """
        self.allow_yanked = allow_yanked
        self.allow_all_prereleases = allow_all_prereleases
        self.format_control = format_control
        self.prefer_binary = prefer_binary
        # Treat the None sentinel the same as an explicit False.
        self.ignore_requires_python = (
            False if ignore_requires_python is None else ignore_requires_python
        )
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/target_python.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from typing import List, Optional, Set, Tuple
|
| 3 |
+
|
| 4 |
+
from pip._vendor.packaging.tags import Tag
|
| 5 |
+
|
| 6 |
+
from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot
|
| 7 |
+
from pip._internal.utils.misc import normalize_version_info
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TargetPython:
    """
    Encapsulates the properties of a Python interpreter one is targeting
    for a package install, download, etc.
    """

    __slots__ = [
        "_given_py_version_info",
        "abis",
        "implementation",
        "platforms",
        "py_version",
        "py_version_info",
        "_valid_tags",
        "_valid_tags_set",
    ]

    def __init__(
        self,
        platforms: Optional[List[str]] = None,
        py_version_info: Optional[Tuple[int, ...]] = None,
        abis: Optional[List[str]] = None,
        implementation: Optional[str] = None,
    ) -> None:
        """
        :param platforms: A list of strings or None. If None, searches for
            packages that are supported by the current system. Otherwise, will
            find packages that can be built on the platforms passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param py_version_info: An optional tuple of ints representing the
            Python version information to use (e.g. `sys.version_info[:3]`).
            This can have length 1, 2, or 3 when provided.
        :param abis: A list of strings or None. This is passed to
            compatibility_tags.py's get_supported() function as is.
        :param implementation: A string or None. This is passed to
            compatibility_tags.py's get_supported() function as is.
        """
        # Remember exactly what the caller passed; get_supported() treats
        # "not given" differently from an explicit version.
        self._given_py_version_info = py_version_info

        if py_version_info is None:
            py_version_info = sys.version_info[:3]
        else:
            py_version_info = normalize_version_info(py_version_info)

        self.abis = abis
        self.implementation = implementation
        self.platforms = platforms
        self.py_version = ".".join(map(str, py_version_info[:2]))
        self.py_version_info = py_version_info

        # Lazily-populated caches for get_sorted_tags / get_unsorted_tags.
        self._valid_tags: Optional[List[Tag]] = None
        self._valid_tags_set: Optional[Set[Tag]] = None

    def format_given(self) -> str:
        """
        Format the given, non-None attributes for display.
        """
        display_version = None
        if self._given_py_version_info is not None:
            display_version = ".".join(
                str(part) for part in self._given_py_version_info
            )

        pairs = (
            ("platforms", self.platforms),
            ("version_info", display_version),
            ("abis", self.abis),
            ("implementation", self.implementation),
        )
        return " ".join(
            f"{name}={value!r}" for name, value in pairs if value is not None
        )

    def get_sorted_tags(self) -> List[Tag]:
        """
        Return the supported PEP 425 tags to check wheel candidates against.

        The tags are returned in order of preference (most preferred first).
        """
        if self._valid_tags is None:
            # Pass versions=None if no py_version_info was given since
            # versions=None uses special default logic.
            given = self._given_py_version_info
            version = None if given is None else version_info_to_nodot(given)

            self._valid_tags = get_supported(
                version=version,
                platforms=self.platforms,
                abis=self.abis,
                impl=self.implementation,
            )

        return self._valid_tags

    def get_unsorted_tags(self) -> Set[Tag]:
        """Exactly the same as get_sorted_tags, but returns a set.

        This is important for performance.
        """
        if self._valid_tags_set is None:
            self._valid_tags_set = set(self.get_sorted_tags())

        return self._valid_tags_set
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/models/wheel.py
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Represents a wheel file and provides access to the various parts of the
|
| 2 |
+
name that have meaning.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import re
|
| 6 |
+
from typing import Dict, Iterable, List
|
| 7 |
+
|
| 8 |
+
from pip._vendor.packaging.tags import Tag
|
| 9 |
+
from pip._vendor.packaging.utils import (
|
| 10 |
+
InvalidWheelFilename as PackagingInvalidWheelName,
|
| 11 |
+
)
|
| 12 |
+
from pip._vendor.packaging.utils import parse_wheel_filename
|
| 13 |
+
|
| 14 |
+
from pip._internal.exceptions import InvalidWheelFilename
|
| 15 |
+
from pip._internal.utils.deprecation import deprecated
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class Wheel:
    """A wheel file.

    Parses a wheel (or .dist-info) file name into its component parts and
    provides PEP 425 tag-matching helpers.
    """

    # Matches both ``*.whl`` file names and ``*.dist-info`` directory names
    # so one pattern serves both parsing needs.
    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE,
    )

    def __init__(self, filename: str) -> None:
        """
        :param filename: the wheel file name (or .dist-info dir name) to parse.
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            # Bug fix: the message previously hard-coded "(unknown)" in an
            # f-string with no placeholder, so the offending filename was
            # never reported.  Interpolate the actual filename.
            raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.")
        self.filename = filename
        self.name = wheel_info.group("name").replace("_", "-")
        _version = wheel_info.group("ver")
        if "_" in _version:
            # An underscore in the version is not PEP 503/packaging
            # normalised; warn (deprecation) if packaging rejects it,
            # then normalise it for our own use.
            try:
                parse_wheel_filename(filename)
            except PackagingInvalidWheelName as e:
                deprecated(
                    reason=(
                        f"Wheel filename {filename!r} is not correctly normalised. "
                        "Future versions of pip will raise the following error:\n"
                        f"{e.args[0]}\n\n"
                    ),
                    replacement=(
                        "to rename the wheel to use a correctly normalised "
                        "name (this may require updating the version in "
                        "the project metadata)"
                    ),
                    gone_in="25.1",
                    issue=12938,
                )

            _version = _version.replace("_", "-")

        self.version = _version
        self.build_tag = wheel_info.group("build")
        self.pyversions = wheel_info.group("pyver").split(".")
        self.abis = wheel_info.group("abi").split(".")
        self.plats = wheel_info.group("plat").split(".")

        # All the tag combinations from this file
        self.file_tags = {
            Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
        }

    def get_formatted_file_tags(self) -> List[str]:
        """Return the wheel's tags as a sorted list of strings."""
        return sorted(str(tag) for tag in self.file_tags)

    def support_index_min(self, tags: List[Tag]) -> int:
        """Return the lowest index that one of the wheel's file_tag combinations
        achieves in the given list of supported tags.

        For example, if there are 8 supported tags and one of the file tags
        is first in the list, then return 0.

        :param tags: the PEP 425 tags to check the wheel against, in order
            with most preferred first.

        :raises ValueError: If none of the wheel's file tags match one of
            the supported tags.
        """
        try:
            return next(i for i, t in enumerate(tags) if t in self.file_tags)
        except StopIteration:
            raise ValueError()

    def find_most_preferred_tag(
        self, tags: List[Tag], tag_to_priority: Dict[Tag, int]
    ) -> int:
        """Return the priority of the most preferred tag that one of the wheel's file
        tag combinations achieves in the given list of supported tags using the given
        tag_to_priority mapping, where lower priorities are more-preferred.

        This is used in place of support_index_min in some cases in order to avoid
        an expensive linear scan of a large list of tags.

        :param tags: the PEP 425 tags to check the wheel against.
        :param tag_to_priority: a mapping from tag to priority of that tag, where
            lower is more preferred.

        :raises ValueError: If none of the wheel's file tags match one of
            the supported tags.
        """
        return min(
            tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority
        )

    def supported(self, tags: Iterable[Tag]) -> bool:
        """Return whether the wheel is compatible with one of the given tags.

        :param tags: the PEP 425 tags to check the wheel against.
        """
        return not self.file_tags.isdisjoint(tags)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/__init__.py
ADDED
|
File without changes
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (183 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/__init__.py
ADDED
|
File without changes
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc
ADDED
|
Binary file (1.21 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc
ADDED
|
Binary file (1.44 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/build_tracker.py
ADDED
|
@@ -0,0 +1,138 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import hashlib
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
from types import TracebackType
|
| 6 |
+
from typing import Dict, Generator, Optional, Type, Union
|
| 7 |
+
|
| 8 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 9 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 10 |
+
|
| 11 |
+
logger = logging.getLogger(__name__)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@contextlib.contextmanager
def update_env_context_manager(**changes: str) -> Generator[None, None, None]:
    """Temporarily apply ``changes`` to ``os.environ``.

    On exit, every touched variable is restored to its previous value, or
    removed entirely if it was not set before.
    """
    env = os.environ

    # Sentinel distinguishing "variable was unset" from any string value.
    _unset = object()
    previous: Dict[str, Union[object, str]] = {
        name: env.get(name, _unset) for name in changes
    }
    env.update(changes)

    try:
        yield
    finally:
        # Restore the original state, even if the body raised.
        for name, old_value in previous.items():
            if old_value is _unset:
                del env[name]
            else:
                assert isinstance(old_value, str)  # for mypy
                env[name] = old_value
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
@contextlib.contextmanager
def get_build_tracker() -> Generator["BuildTracker", None, None]:
    """Yield a BuildTracker rooted at ``$PIP_BUILD_TRACKER``.

    When the variable is unset (the outermost pip process), a temporary
    root is created and exported so that child pip invocations share it.
    """
    root = os.environ.get("PIP_BUILD_TRACKER")
    with contextlib.ExitStack() as stack:
        if root is None:
            root = stack.enter_context(TempDirectory(kind="build-tracker")).path
            stack.enter_context(update_env_context_manager(PIP_BUILD_TRACKER=root))
            logger.debug("Initialized build tracking at %s", root)

        with BuildTracker(root) as tracker:
            yield tracker
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class TrackerId(str):
    """A ``str`` subtype marking values used as unique build-tracker keys."""
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class BuildTracker:
    """Ensure that an sdist cannot request itself as a setup requirement.

    When an sdist is prepared, it identifies its setup requirements in the
    context of ``BuildTracker.track()``. If a requirement shows up recursively, this
    raises an exception.

    This stops fork bombs embedded in malicious packages."""

    def __init__(self, root: str) -> None:
        # Directory holding one marker file per in-progress build.
        self._root = root
        # In-memory mirror of the marker files this process created.
        self._entries: Dict[TrackerId, InstallRequirement] = {}
        logger.debug("Created build tracker: %s", self._root)

    def __enter__(self) -> "BuildTracker":
        logger.debug("Entered build tracker: %s", self._root)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        self.cleanup()

    def _entry_path(self, key: TrackerId) -> str:
        # Hash the key so arbitrary strings map to valid, bounded file names.
        digest = hashlib.sha224(key.encode()).hexdigest()
        return os.path.join(self._root, digest)

    def add(self, req: InstallRequirement, key: TrackerId) -> None:
        """Add an InstallRequirement to build tracking."""
        marker_path = self._entry_path(key)

        # EAFP: if the marker file is readable, some pip process (possibly
        # this one, recursively) is already building this requirement.
        try:
            with open(marker_path) as existing:
                raise LookupError(
                    f"{req.link} is already being built: {existing.read()}"
                )
        except FileNotFoundError:
            pass

        # Not on disk, so it must not be tracked in memory either.
        assert key not in self._entries

        # Claim the slot by writing our requirement into the marker file.
        with open(marker_path, "w", encoding="utf-8") as fp:
            fp.write(str(req))
        self._entries[key] = req

        logger.debug("Added %s to build tracker %r", req, self._root)

    def remove(self, req: InstallRequirement, key: TrackerId) -> None:
        """Remove an InstallRequirement from build tracking."""
        # Drop both the on-disk marker and the in-memory entry.
        os.unlink(self._entry_path(key))
        del self._entries[key]

        logger.debug("Removed %s from build tracker %r", req, self._root)

    def cleanup(self) -> None:
        # Snapshot the items: remove() mutates self._entries as we iterate.
        for key, req in list(self._entries.items()):
            self.remove(req, key)

        logger.debug("Removed build tracker: %r", self._root)

    @contextlib.contextmanager
    def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]:
        """Ensure that `key` cannot install itself as a setup requirement.

        :raises LookupError: If `key` was already provided in a parent invocation of
            the context introduced by this method."""
        tracker_id = TrackerId(key)
        self.add(req, tracker_id)
        yield
        self.remove(req, tracker_id)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Metadata generation logic for source distributions.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import os
|
| 5 |
+
|
| 6 |
+
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
|
| 7 |
+
|
| 8 |
+
from pip._internal.build_env import BuildEnvironment
|
| 9 |
+
from pip._internal.exceptions import (
|
| 10 |
+
InstallationSubprocessError,
|
| 11 |
+
MetadataGenerationFailed,
|
| 12 |
+
)
|
| 13 |
+
from pip._internal.utils.subprocess import runner_with_spinner_message
|
| 14 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def generate_metadata(
    build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 517.

    Returns the generated metadata directory.
    """
    # globally_managed: the directory must outlive this call, since the
    # returned metadata is consumed later in the pipeline.
    metadata_dir = TempDirectory(kind="modern-metadata", globally_managed=True).path

    with build_env:
        # BuildBackendHookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so we never have to worry about
        # the hook being absent.
        runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")
        with backend.subprocess_runner(runner):
            try:
                distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    return os.path.join(metadata_dir, distinfo_dir)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Metadata generation logic for source distributions.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import os
|
| 5 |
+
|
| 6 |
+
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
|
| 7 |
+
|
| 8 |
+
from pip._internal.build_env import BuildEnvironment
|
| 9 |
+
from pip._internal.exceptions import (
|
| 10 |
+
InstallationSubprocessError,
|
| 11 |
+
MetadataGenerationFailed,
|
| 12 |
+
)
|
| 13 |
+
from pip._internal.utils.subprocess import runner_with_spinner_message
|
| 14 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def generate_editable_metadata(
    build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 660.

    Returns the generated metadata directory.
    """
    # globally_managed: the directory must outlive this call, since the
    # returned metadata is consumed later in the pipeline.
    metadata_dir = TempDirectory(kind="modern-metadata", globally_managed=True).path

    with build_env:
        # BuildBackendHookCaller implements a fallback for
        # prepare_metadata_for_build_editable, so we never have to worry
        # about the hook being absent.
        runner = runner_with_spinner_message(
            "Preparing editable metadata (pyproject.toml)"
        )
        with backend.subprocess_runner(runner):
            try:
                distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir)
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    assert distinfo_dir is not None
    return os.path.join(metadata_dir, distinfo_dir)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Metadata generation logic for legacy source distributions.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
import os
|
| 6 |
+
|
| 7 |
+
from pip._internal.build_env import BuildEnvironment
|
| 8 |
+
from pip._internal.cli.spinners import open_spinner
|
| 9 |
+
from pip._internal.exceptions import (
|
| 10 |
+
InstallationError,
|
| 11 |
+
InstallationSubprocessError,
|
| 12 |
+
MetadataGenerationFailed,
|
| 13 |
+
)
|
| 14 |
+
from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
|
| 15 |
+
from pip._internal.utils.subprocess import call_subprocess
|
| 16 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 17 |
+
|
| 18 |
+
logger = logging.getLogger(__name__)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def _find_egg_info(directory: str) -> str:
|
| 22 |
+
"""Find an .egg-info subdirectory in `directory`."""
|
| 23 |
+
filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")]
|
| 24 |
+
|
| 25 |
+
if not filenames:
|
| 26 |
+
raise InstallationError(f"No .egg-info directory found in {directory}")
|
| 27 |
+
|
| 28 |
+
if len(filenames) > 1:
|
| 29 |
+
raise InstallationError(
|
| 30 |
+
f"More than one .egg-info directory found in {directory}"
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
return os.path.join(directory, filenames[0])
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def generate_metadata(
|
| 37 |
+
build_env: BuildEnvironment,
|
| 38 |
+
setup_py_path: str,
|
| 39 |
+
source_dir: str,
|
| 40 |
+
isolated: bool,
|
| 41 |
+
details: str,
|
| 42 |
+
) -> str:
|
| 43 |
+
"""Generate metadata using setup.py-based defacto mechanisms.
|
| 44 |
+
|
| 45 |
+
Returns the generated metadata directory.
|
| 46 |
+
"""
|
| 47 |
+
logger.debug(
|
| 48 |
+
"Running setup.py (path:%s) egg_info for package %s",
|
| 49 |
+
setup_py_path,
|
| 50 |
+
details,
|
| 51 |
+
)
|
| 52 |
+
|
| 53 |
+
egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path
|
| 54 |
+
|
| 55 |
+
args = make_setuptools_egg_info_args(
|
| 56 |
+
setup_py_path,
|
| 57 |
+
egg_info_dir=egg_info_dir,
|
| 58 |
+
no_user_config=isolated,
|
| 59 |
+
)
|
| 60 |
+
|
| 61 |
+
with build_env:
|
| 62 |
+
with open_spinner("Preparing metadata (setup.py)") as spinner:
|
| 63 |
+
try:
|
| 64 |
+
call_subprocess(
|
| 65 |
+
args,
|
| 66 |
+
cwd=source_dir,
|
| 67 |
+
command_desc="python setup.py egg_info",
|
| 68 |
+
spinner=spinner,
|
| 69 |
+
)
|
| 70 |
+
except InstallationSubprocessError as error:
|
| 71 |
+
raise MetadataGenerationFailed(package_details=details) from error
|
| 72 |
+
|
| 73 |
+
# Return the .egg-info directory.
|
| 74 |
+
return _find_egg_info(egg_info_dir)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os.path
|
| 3 |
+
from typing import List, Optional
|
| 4 |
+
|
| 5 |
+
from pip._internal.cli.spinners import open_spinner
|
| 6 |
+
from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
|
| 7 |
+
from pip._internal.utils.subprocess import call_subprocess, format_command_args
|
| 8 |
+
|
| 9 |
+
logger = logging.getLogger(__name__)
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def format_command_result(
|
| 13 |
+
command_args: List[str],
|
| 14 |
+
command_output: str,
|
| 15 |
+
) -> str:
|
| 16 |
+
"""Format command information for logging."""
|
| 17 |
+
command_desc = format_command_args(command_args)
|
| 18 |
+
text = f"Command arguments: {command_desc}\n"
|
| 19 |
+
|
| 20 |
+
if not command_output:
|
| 21 |
+
text += "Command output: None"
|
| 22 |
+
elif logger.getEffectiveLevel() > logging.DEBUG:
|
| 23 |
+
text += "Command output: [use --verbose to show]"
|
| 24 |
+
else:
|
| 25 |
+
if not command_output.endswith("\n"):
|
| 26 |
+
command_output += "\n"
|
| 27 |
+
text += f"Command output:\n{command_output}"
|
| 28 |
+
|
| 29 |
+
return text
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def get_legacy_build_wheel_path(
|
| 33 |
+
names: List[str],
|
| 34 |
+
temp_dir: str,
|
| 35 |
+
name: str,
|
| 36 |
+
command_args: List[str],
|
| 37 |
+
command_output: str,
|
| 38 |
+
) -> Optional[str]:
|
| 39 |
+
"""Return the path to the wheel in the temporary build directory."""
|
| 40 |
+
# Sort for determinism.
|
| 41 |
+
names = sorted(names)
|
| 42 |
+
if not names:
|
| 43 |
+
msg = f"Legacy build of wheel for {name!r} created no files.\n"
|
| 44 |
+
msg += format_command_result(command_args, command_output)
|
| 45 |
+
logger.warning(msg)
|
| 46 |
+
return None
|
| 47 |
+
|
| 48 |
+
if len(names) > 1:
|
| 49 |
+
msg = (
|
| 50 |
+
f"Legacy build of wheel for {name!r} created more than one file.\n"
|
| 51 |
+
f"Filenames (choosing first): {names}\n"
|
| 52 |
+
)
|
| 53 |
+
msg += format_command_result(command_args, command_output)
|
| 54 |
+
logger.warning(msg)
|
| 55 |
+
|
| 56 |
+
return os.path.join(temp_dir, names[0])
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def build_wheel_legacy(
|
| 60 |
+
name: str,
|
| 61 |
+
setup_py_path: str,
|
| 62 |
+
source_dir: str,
|
| 63 |
+
global_options: List[str],
|
| 64 |
+
build_options: List[str],
|
| 65 |
+
tempd: str,
|
| 66 |
+
) -> Optional[str]:
|
| 67 |
+
"""Build one unpacked package using the "legacy" build process.
|
| 68 |
+
|
| 69 |
+
Returns path to wheel if successfully built. Otherwise, returns None.
|
| 70 |
+
"""
|
| 71 |
+
wheel_args = make_setuptools_bdist_wheel_args(
|
| 72 |
+
setup_py_path,
|
| 73 |
+
global_options=global_options,
|
| 74 |
+
build_options=build_options,
|
| 75 |
+
destination_dir=tempd,
|
| 76 |
+
)
|
| 77 |
+
|
| 78 |
+
spin_message = f"Building wheel for {name} (setup.py)"
|
| 79 |
+
with open_spinner(spin_message) as spinner:
|
| 80 |
+
logger.debug("Destination directory: %s", tempd)
|
| 81 |
+
|
| 82 |
+
try:
|
| 83 |
+
output = call_subprocess(
|
| 84 |
+
wheel_args,
|
| 85 |
+
command_desc="python setup.py bdist_wheel",
|
| 86 |
+
cwd=source_dir,
|
| 87 |
+
spinner=spinner,
|
| 88 |
+
)
|
| 89 |
+
except Exception:
|
| 90 |
+
spinner.finish("error")
|
| 91 |
+
logger.error("Failed building wheel for %s", name)
|
| 92 |
+
return None
|
| 93 |
+
|
| 94 |
+
names = os.listdir(tempd)
|
| 95 |
+
wheel_path = get_legacy_build_wheel_path(
|
| 96 |
+
names=names,
|
| 97 |
+
temp_dir=tempd,
|
| 98 |
+
name=name,
|
| 99 |
+
command_args=wheel_args,
|
| 100 |
+
command_output=output,
|
| 101 |
+
)
|
| 102 |
+
return wheel_path
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/check.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Validation of dependencies of packages
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
from contextlib import suppress
|
| 6 |
+
from email.parser import Parser
|
| 7 |
+
from functools import reduce
|
| 8 |
+
from typing import (
|
| 9 |
+
Callable,
|
| 10 |
+
Dict,
|
| 11 |
+
FrozenSet,
|
| 12 |
+
Generator,
|
| 13 |
+
Iterable,
|
| 14 |
+
List,
|
| 15 |
+
NamedTuple,
|
| 16 |
+
Optional,
|
| 17 |
+
Set,
|
| 18 |
+
Tuple,
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
from pip._vendor.packaging.requirements import Requirement
|
| 22 |
+
from pip._vendor.packaging.tags import Tag, parse_tag
|
| 23 |
+
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
| 24 |
+
from pip._vendor.packaging.version import Version
|
| 25 |
+
|
| 26 |
+
from pip._internal.distributions import make_distribution_for_install_requirement
|
| 27 |
+
from pip._internal.metadata import get_default_environment
|
| 28 |
+
from pip._internal.metadata.base import BaseDistribution
|
| 29 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 30 |
+
|
| 31 |
+
logger = logging.getLogger(__name__)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class PackageDetails(NamedTuple):
|
| 35 |
+
version: Version
|
| 36 |
+
dependencies: List[Requirement]
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
# Shorthands
|
| 40 |
+
PackageSet = Dict[NormalizedName, PackageDetails]
|
| 41 |
+
Missing = Tuple[NormalizedName, Requirement]
|
| 42 |
+
Conflicting = Tuple[NormalizedName, Version, Requirement]
|
| 43 |
+
|
| 44 |
+
MissingDict = Dict[NormalizedName, List[Missing]]
|
| 45 |
+
ConflictingDict = Dict[NormalizedName, List[Conflicting]]
|
| 46 |
+
CheckResult = Tuple[MissingDict, ConflictingDict]
|
| 47 |
+
ConflictDetails = Tuple[PackageSet, CheckResult]
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
|
| 51 |
+
"""Converts a list of distributions into a PackageSet."""
|
| 52 |
+
package_set = {}
|
| 53 |
+
problems = False
|
| 54 |
+
env = get_default_environment()
|
| 55 |
+
for dist in env.iter_installed_distributions(local_only=False, skip=()):
|
| 56 |
+
name = dist.canonical_name
|
| 57 |
+
try:
|
| 58 |
+
dependencies = list(dist.iter_dependencies())
|
| 59 |
+
package_set[name] = PackageDetails(dist.version, dependencies)
|
| 60 |
+
except (OSError, ValueError) as e:
|
| 61 |
+
# Don't crash on unreadable or broken metadata.
|
| 62 |
+
logger.warning("Error parsing dependencies of %s: %s", name, e)
|
| 63 |
+
problems = True
|
| 64 |
+
return package_set, problems
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def check_package_set(
|
| 68 |
+
package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
|
| 69 |
+
) -> CheckResult:
|
| 70 |
+
"""Check if a package set is consistent
|
| 71 |
+
|
| 72 |
+
If should_ignore is passed, it should be a callable that takes a
|
| 73 |
+
package name and returns a boolean.
|
| 74 |
+
"""
|
| 75 |
+
|
| 76 |
+
missing = {}
|
| 77 |
+
conflicting = {}
|
| 78 |
+
|
| 79 |
+
for package_name, package_detail in package_set.items():
|
| 80 |
+
# Info about dependencies of package_name
|
| 81 |
+
missing_deps: Set[Missing] = set()
|
| 82 |
+
conflicting_deps: Set[Conflicting] = set()
|
| 83 |
+
|
| 84 |
+
if should_ignore and should_ignore(package_name):
|
| 85 |
+
continue
|
| 86 |
+
|
| 87 |
+
for req in package_detail.dependencies:
|
| 88 |
+
name = canonicalize_name(req.name)
|
| 89 |
+
|
| 90 |
+
# Check if it's missing
|
| 91 |
+
if name not in package_set:
|
| 92 |
+
missed = True
|
| 93 |
+
if req.marker is not None:
|
| 94 |
+
missed = req.marker.evaluate({"extra": ""})
|
| 95 |
+
if missed:
|
| 96 |
+
missing_deps.add((name, req))
|
| 97 |
+
continue
|
| 98 |
+
|
| 99 |
+
# Check if there's a conflict
|
| 100 |
+
version = package_set[name].version
|
| 101 |
+
if not req.specifier.contains(version, prereleases=True):
|
| 102 |
+
conflicting_deps.add((name, version, req))
|
| 103 |
+
|
| 104 |
+
if missing_deps:
|
| 105 |
+
missing[package_name] = sorted(missing_deps, key=str)
|
| 106 |
+
if conflicting_deps:
|
| 107 |
+
conflicting[package_name] = sorted(conflicting_deps, key=str)
|
| 108 |
+
|
| 109 |
+
return missing, conflicting
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
|
| 113 |
+
"""For checking if the dependency graph would be consistent after \
|
| 114 |
+
installing given requirements
|
| 115 |
+
"""
|
| 116 |
+
# Start from the current state
|
| 117 |
+
package_set, _ = create_package_set_from_installed()
|
| 118 |
+
# Install packages
|
| 119 |
+
would_be_installed = _simulate_installation_of(to_install, package_set)
|
| 120 |
+
|
| 121 |
+
# Only warn about directly-dependent packages; create a whitelist of them
|
| 122 |
+
whitelist = _create_whitelist(would_be_installed, package_set)
|
| 123 |
+
|
| 124 |
+
return (
|
| 125 |
+
package_set,
|
| 126 |
+
check_package_set(
|
| 127 |
+
package_set, should_ignore=lambda name: name not in whitelist
|
| 128 |
+
),
|
| 129 |
+
)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def check_unsupported(
|
| 133 |
+
packages: Iterable[BaseDistribution],
|
| 134 |
+
supported_tags: Iterable[Tag],
|
| 135 |
+
) -> Generator[BaseDistribution, None, None]:
|
| 136 |
+
for p in packages:
|
| 137 |
+
with suppress(FileNotFoundError):
|
| 138 |
+
wheel_file = p.read_text("WHEEL")
|
| 139 |
+
wheel_tags: FrozenSet[Tag] = reduce(
|
| 140 |
+
frozenset.union,
|
| 141 |
+
map(parse_tag, Parser().parsestr(wheel_file).get_all("Tag", [])),
|
| 142 |
+
frozenset(),
|
| 143 |
+
)
|
| 144 |
+
if wheel_tags.isdisjoint(supported_tags):
|
| 145 |
+
yield p
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def _simulate_installation_of(
|
| 149 |
+
to_install: List[InstallRequirement], package_set: PackageSet
|
| 150 |
+
) -> Set[NormalizedName]:
|
| 151 |
+
"""Computes the version of packages after installing to_install."""
|
| 152 |
+
# Keep track of packages that were installed
|
| 153 |
+
installed = set()
|
| 154 |
+
|
| 155 |
+
# Modify it as installing requirement_set would (assuming no errors)
|
| 156 |
+
for inst_req in to_install:
|
| 157 |
+
abstract_dist = make_distribution_for_install_requirement(inst_req)
|
| 158 |
+
dist = abstract_dist.get_metadata_distribution()
|
| 159 |
+
name = dist.canonical_name
|
| 160 |
+
package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies()))
|
| 161 |
+
|
| 162 |
+
installed.add(name)
|
| 163 |
+
|
| 164 |
+
return installed
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def _create_whitelist(
|
| 168 |
+
would_be_installed: Set[NormalizedName], package_set: PackageSet
|
| 169 |
+
) -> Set[NormalizedName]:
|
| 170 |
+
packages_affected = set(would_be_installed)
|
| 171 |
+
|
| 172 |
+
for package_name in package_set:
|
| 173 |
+
if package_name in packages_affected:
|
| 174 |
+
continue
|
| 175 |
+
|
| 176 |
+
for req in package_set[package_name].dependencies:
|
| 177 |
+
if canonicalize_name(req.name) in packages_affected:
|
| 178 |
+
packages_affected.add(package_name)
|
| 179 |
+
break
|
| 180 |
+
|
| 181 |
+
return packages_affected
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/freeze.py
ADDED
|
@@ -0,0 +1,256 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
from dataclasses import dataclass, field
|
| 5 |
+
from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set
|
| 6 |
+
|
| 7 |
+
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
| 8 |
+
from pip._vendor.packaging.version import InvalidVersion
|
| 9 |
+
|
| 10 |
+
from pip._internal.exceptions import BadCommand, InstallationError
|
| 11 |
+
from pip._internal.metadata import BaseDistribution, get_environment
|
| 12 |
+
from pip._internal.req.constructors import (
|
| 13 |
+
install_req_from_editable,
|
| 14 |
+
install_req_from_line,
|
| 15 |
+
)
|
| 16 |
+
from pip._internal.req.req_file import COMMENT_RE
|
| 17 |
+
from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference
|
| 18 |
+
|
| 19 |
+
logger = logging.getLogger(__name__)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class _EditableInfo(NamedTuple):
|
| 23 |
+
requirement: str
|
| 24 |
+
comments: List[str]
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def freeze(
|
| 28 |
+
requirement: Optional[List[str]] = None,
|
| 29 |
+
local_only: bool = False,
|
| 30 |
+
user_only: bool = False,
|
| 31 |
+
paths: Optional[List[str]] = None,
|
| 32 |
+
isolated: bool = False,
|
| 33 |
+
exclude_editable: bool = False,
|
| 34 |
+
skip: Container[str] = (),
|
| 35 |
+
) -> Generator[str, None, None]:
|
| 36 |
+
installations: Dict[str, FrozenRequirement] = {}
|
| 37 |
+
|
| 38 |
+
dists = get_environment(paths).iter_installed_distributions(
|
| 39 |
+
local_only=local_only,
|
| 40 |
+
skip=(),
|
| 41 |
+
user_only=user_only,
|
| 42 |
+
)
|
| 43 |
+
for dist in dists:
|
| 44 |
+
req = FrozenRequirement.from_dist(dist)
|
| 45 |
+
if exclude_editable and req.editable:
|
| 46 |
+
continue
|
| 47 |
+
installations[req.canonical_name] = req
|
| 48 |
+
|
| 49 |
+
if requirement:
|
| 50 |
+
# the options that don't get turned into an InstallRequirement
|
| 51 |
+
# should only be emitted once, even if the same option is in multiple
|
| 52 |
+
# requirements files, so we need to keep track of what has been emitted
|
| 53 |
+
# so that we don't emit it again if it's seen again
|
| 54 |
+
emitted_options: Set[str] = set()
|
| 55 |
+
# keep track of which files a requirement is in so that we can
|
| 56 |
+
# give an accurate warning if a requirement appears multiple times.
|
| 57 |
+
req_files: Dict[str, List[str]] = collections.defaultdict(list)
|
| 58 |
+
for req_file_path in requirement:
|
| 59 |
+
with open(req_file_path) as req_file:
|
| 60 |
+
for line in req_file:
|
| 61 |
+
if (
|
| 62 |
+
not line.strip()
|
| 63 |
+
or line.strip().startswith("#")
|
| 64 |
+
or line.startswith(
|
| 65 |
+
(
|
| 66 |
+
"-r",
|
| 67 |
+
"--requirement",
|
| 68 |
+
"-f",
|
| 69 |
+
"--find-links",
|
| 70 |
+
"-i",
|
| 71 |
+
"--index-url",
|
| 72 |
+
"--pre",
|
| 73 |
+
"--trusted-host",
|
| 74 |
+
"--process-dependency-links",
|
| 75 |
+
"--extra-index-url",
|
| 76 |
+
"--use-feature",
|
| 77 |
+
)
|
| 78 |
+
)
|
| 79 |
+
):
|
| 80 |
+
line = line.rstrip()
|
| 81 |
+
if line not in emitted_options:
|
| 82 |
+
emitted_options.add(line)
|
| 83 |
+
yield line
|
| 84 |
+
continue
|
| 85 |
+
|
| 86 |
+
if line.startswith("-e") or line.startswith("--editable"):
|
| 87 |
+
if line.startswith("-e"):
|
| 88 |
+
line = line[2:].strip()
|
| 89 |
+
else:
|
| 90 |
+
line = line[len("--editable") :].strip().lstrip("=")
|
| 91 |
+
line_req = install_req_from_editable(
|
| 92 |
+
line,
|
| 93 |
+
isolated=isolated,
|
| 94 |
+
)
|
| 95 |
+
else:
|
| 96 |
+
line_req = install_req_from_line(
|
| 97 |
+
COMMENT_RE.sub("", line).strip(),
|
| 98 |
+
isolated=isolated,
|
| 99 |
+
)
|
| 100 |
+
|
| 101 |
+
if not line_req.name:
|
| 102 |
+
logger.info(
|
| 103 |
+
"Skipping line in requirement file [%s] because "
|
| 104 |
+
"it's not clear what it would install: %s",
|
| 105 |
+
req_file_path,
|
| 106 |
+
line.strip(),
|
| 107 |
+
)
|
| 108 |
+
logger.info(
|
| 109 |
+
" (add #egg=PackageName to the URL to avoid"
|
| 110 |
+
" this warning)"
|
| 111 |
+
)
|
| 112 |
+
else:
|
| 113 |
+
line_req_canonical_name = canonicalize_name(line_req.name)
|
| 114 |
+
if line_req_canonical_name not in installations:
|
| 115 |
+
# either it's not installed, or it is installed
|
| 116 |
+
# but has been processed already
|
| 117 |
+
if not req_files[line_req.name]:
|
| 118 |
+
logger.warning(
|
| 119 |
+
"Requirement file [%s] contains %s, but "
|
| 120 |
+
"package %r is not installed",
|
| 121 |
+
req_file_path,
|
| 122 |
+
COMMENT_RE.sub("", line).strip(),
|
| 123 |
+
line_req.name,
|
| 124 |
+
)
|
| 125 |
+
else:
|
| 126 |
+
req_files[line_req.name].append(req_file_path)
|
| 127 |
+
else:
|
| 128 |
+
yield str(installations[line_req_canonical_name]).rstrip()
|
| 129 |
+
del installations[line_req_canonical_name]
|
| 130 |
+
req_files[line_req.name].append(req_file_path)
|
| 131 |
+
|
| 132 |
+
# Warn about requirements that were included multiple times (in a
|
| 133 |
+
# single requirements file or in different requirements files).
|
| 134 |
+
for name, files in req_files.items():
|
| 135 |
+
if len(files) > 1:
|
| 136 |
+
logger.warning(
|
| 137 |
+
"Requirement %s included multiple times [%s]",
|
| 138 |
+
name,
|
| 139 |
+
", ".join(sorted(set(files))),
|
| 140 |
+
)
|
| 141 |
+
|
| 142 |
+
yield ("## The following requirements were added by pip freeze:")
|
| 143 |
+
for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
|
| 144 |
+
if installation.canonical_name not in skip:
|
| 145 |
+
yield str(installation).rstrip()
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def _format_as_name_version(dist: BaseDistribution) -> str:
|
| 149 |
+
try:
|
| 150 |
+
dist_version = dist.version
|
| 151 |
+
except InvalidVersion:
|
| 152 |
+
# legacy version
|
| 153 |
+
return f"{dist.raw_name}==={dist.raw_version}"
|
| 154 |
+
else:
|
| 155 |
+
return f"{dist.raw_name}=={dist_version}"
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
|
| 159 |
+
"""
|
| 160 |
+
Compute and return values (req, comments) for use in
|
| 161 |
+
FrozenRequirement.from_dist().
|
| 162 |
+
"""
|
| 163 |
+
editable_project_location = dist.editable_project_location
|
| 164 |
+
assert editable_project_location
|
| 165 |
+
location = os.path.normcase(os.path.abspath(editable_project_location))
|
| 166 |
+
|
| 167 |
+
from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs
|
| 168 |
+
|
| 169 |
+
vcs_backend = vcs.get_backend_for_dir(location)
|
| 170 |
+
|
| 171 |
+
if vcs_backend is None:
|
| 172 |
+
display = _format_as_name_version(dist)
|
| 173 |
+
logger.debug(
|
| 174 |
+
'No VCS found for editable requirement "%s" in: %r',
|
| 175 |
+
display,
|
| 176 |
+
location,
|
| 177 |
+
)
|
| 178 |
+
return _EditableInfo(
|
| 179 |
+
requirement=location,
|
| 180 |
+
comments=[f"# Editable install with no version control ({display})"],
|
| 181 |
+
)
|
| 182 |
+
|
| 183 |
+
vcs_name = type(vcs_backend).__name__
|
| 184 |
+
|
| 185 |
+
try:
|
| 186 |
+
req = vcs_backend.get_src_requirement(location, dist.raw_name)
|
| 187 |
+
except RemoteNotFoundError:
|
| 188 |
+
display = _format_as_name_version(dist)
|
| 189 |
+
return _EditableInfo(
|
| 190 |
+
requirement=location,
|
| 191 |
+
comments=[f"# Editable {vcs_name} install with no remote ({display})"],
|
| 192 |
+
)
|
| 193 |
+
except RemoteNotValidError as ex:
|
| 194 |
+
display = _format_as_name_version(dist)
|
| 195 |
+
return _EditableInfo(
|
| 196 |
+
requirement=location,
|
| 197 |
+
comments=[
|
| 198 |
+
f"# Editable {vcs_name} install ({display}) with either a deleted "
|
| 199 |
+
f"local remote or invalid URI:",
|
| 200 |
+
f"# '{ex.url}'",
|
| 201 |
+
],
|
| 202 |
+
)
|
| 203 |
+
except BadCommand:
|
| 204 |
+
logger.warning(
|
| 205 |
+
"cannot determine version of editable source in %s "
|
| 206 |
+
"(%s command not found in path)",
|
| 207 |
+
location,
|
| 208 |
+
vcs_backend.name,
|
| 209 |
+
)
|
| 210 |
+
return _EditableInfo(requirement=location, comments=[])
|
| 211 |
+
except InstallationError as exc:
|
| 212 |
+
logger.warning("Error when trying to get requirement for VCS system %s", exc)
|
| 213 |
+
else:
|
| 214 |
+
return _EditableInfo(requirement=req, comments=[])
|
| 215 |
+
|
| 216 |
+
logger.warning("Could not determine repository location of %s", location)
|
| 217 |
+
|
| 218 |
+
return _EditableInfo(
|
| 219 |
+
requirement=location,
|
| 220 |
+
comments=["## !! Could not determine repository location"],
|
| 221 |
+
)
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
@dataclass(frozen=True)
|
| 225 |
+
class FrozenRequirement:
|
| 226 |
+
name: str
|
| 227 |
+
req: str
|
| 228 |
+
editable: bool
|
| 229 |
+
comments: Iterable[str] = field(default_factory=tuple)
|
| 230 |
+
|
| 231 |
+
@property
|
| 232 |
+
def canonical_name(self) -> NormalizedName:
|
| 233 |
+
return canonicalize_name(self.name)
|
| 234 |
+
|
| 235 |
+
@classmethod
|
| 236 |
+
def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
|
| 237 |
+
editable = dist.editable
|
| 238 |
+
if editable:
|
| 239 |
+
req, comments = _get_editable_info(dist)
|
| 240 |
+
else:
|
| 241 |
+
comments = []
|
| 242 |
+
direct_url = dist.direct_url
|
| 243 |
+
if direct_url:
|
| 244 |
+
# if PEP 610 metadata is present, use it
|
| 245 |
+
req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name)
|
| 246 |
+
else:
|
| 247 |
+
# name==version requirement
|
| 248 |
+
req = _format_as_name_version(dist)
|
| 249 |
+
|
| 250 |
+
return cls(dist.raw_name, req, editable, comments=comments)
|
| 251 |
+
|
| 252 |
+
def __str__(self) -> str:
|
| 253 |
+
req = self.req
|
| 254 |
+
if self.editable:
|
| 255 |
+
req = f"-e {req}"
|
| 256 |
+
return "\n".join(list(self.comments) + [str(req)]) + "\n"
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""For modules related to installing packages.
|
| 2 |
+
"""
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (247 Bytes). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc
ADDED
|
Binary file (1.48 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-310.pyc
ADDED
|
Binary file (21.5 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/install/editable_legacy.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Legacy editable installation process, i.e. `setup.py develop`.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
from typing import Optional, Sequence
|
| 6 |
+
|
| 7 |
+
from pip._internal.build_env import BuildEnvironment
|
| 8 |
+
from pip._internal.utils.logging import indent_log
|
| 9 |
+
from pip._internal.utils.setuptools_build import make_setuptools_develop_args
|
| 10 |
+
from pip._internal.utils.subprocess import call_subprocess
|
| 11 |
+
|
| 12 |
+
logger = logging.getLogger(__name__)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def install_editable(
|
| 16 |
+
*,
|
| 17 |
+
global_options: Sequence[str],
|
| 18 |
+
prefix: Optional[str],
|
| 19 |
+
home: Optional[str],
|
| 20 |
+
use_user_site: bool,
|
| 21 |
+
name: str,
|
| 22 |
+
setup_py_path: str,
|
| 23 |
+
isolated: bool,
|
| 24 |
+
build_env: BuildEnvironment,
|
| 25 |
+
unpacked_source_directory: str,
|
| 26 |
+
) -> None:
|
| 27 |
+
"""Install a package in editable mode. Most arguments are pass-through
|
| 28 |
+
to setuptools.
|
| 29 |
+
"""
|
| 30 |
+
logger.info("Running setup.py develop for %s", name)
|
| 31 |
+
|
| 32 |
+
args = make_setuptools_develop_args(
|
| 33 |
+
setup_py_path,
|
| 34 |
+
global_options=global_options,
|
| 35 |
+
no_user_config=isolated,
|
| 36 |
+
prefix=prefix,
|
| 37 |
+
home=home,
|
| 38 |
+
use_user_site=use_user_site,
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
with indent_log():
|
| 42 |
+
with build_env:
|
| 43 |
+
call_subprocess(
|
| 44 |
+
args,
|
| 45 |
+
command_desc="python setup.py develop",
|
| 46 |
+
cwd=unpacked_source_directory,
|
| 47 |
+
)
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/install/wheel.py
ADDED
|
@@ -0,0 +1,741 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Support for installing and building the "wheel" binary package format.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import collections
|
| 5 |
+
import compileall
|
| 6 |
+
import contextlib
|
| 7 |
+
import csv
|
| 8 |
+
import importlib
|
| 9 |
+
import logging
|
| 10 |
+
import os.path
|
| 11 |
+
import re
|
| 12 |
+
import shutil
|
| 13 |
+
import sys
|
| 14 |
+
import warnings
|
| 15 |
+
from base64 import urlsafe_b64encode
|
| 16 |
+
from email.message import Message
|
| 17 |
+
from itertools import chain, filterfalse, starmap
|
| 18 |
+
from typing import (
|
| 19 |
+
IO,
|
| 20 |
+
TYPE_CHECKING,
|
| 21 |
+
Any,
|
| 22 |
+
BinaryIO,
|
| 23 |
+
Callable,
|
| 24 |
+
Dict,
|
| 25 |
+
Generator,
|
| 26 |
+
Iterable,
|
| 27 |
+
Iterator,
|
| 28 |
+
List,
|
| 29 |
+
NewType,
|
| 30 |
+
Optional,
|
| 31 |
+
Protocol,
|
| 32 |
+
Sequence,
|
| 33 |
+
Set,
|
| 34 |
+
Tuple,
|
| 35 |
+
Union,
|
| 36 |
+
cast,
|
| 37 |
+
)
|
| 38 |
+
from zipfile import ZipFile, ZipInfo
|
| 39 |
+
|
| 40 |
+
from pip._vendor.distlib.scripts import ScriptMaker
|
| 41 |
+
from pip._vendor.distlib.util import get_export_entry
|
| 42 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 43 |
+
|
| 44 |
+
from pip._internal.exceptions import InstallationError
|
| 45 |
+
from pip._internal.locations import get_major_minor_version
|
| 46 |
+
from pip._internal.metadata import (
|
| 47 |
+
BaseDistribution,
|
| 48 |
+
FilesystemWheel,
|
| 49 |
+
get_wheel_distribution,
|
| 50 |
+
)
|
| 51 |
+
from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
|
| 52 |
+
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
|
| 53 |
+
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
|
| 54 |
+
from pip._internal.utils.misc import StreamWrapper, ensure_dir, hash_file, partition
|
| 55 |
+
from pip._internal.utils.unpacking import (
|
| 56 |
+
current_umask,
|
| 57 |
+
is_within_directory,
|
| 58 |
+
set_extracted_file_to_default_mode_plus_executable,
|
| 59 |
+
zip_item_is_executable,
|
| 60 |
+
)
|
| 61 |
+
from pip._internal.utils.wheel import parse_wheel
|
| 62 |
+
|
| 63 |
+
if TYPE_CHECKING:

    class File(Protocol):
        """Structural type for anything this module can install from a wheel.

        Implementations (ZipBackedFile, ScriptFile) expose where the file
        came from in the wheel RECORD, where it lands on disk, and whether
        its content was modified during installation.
        """

        # '/'-separated path of the file inside the wheel's RECORD
        src_record_path: "RecordPath"
        # absolute filesystem destination path
        dest_path: str
        # True once save() altered the content (e.g. shebang rewrite)
        changed: bool

        def save(self) -> None:
            pass


logger = logging.getLogger(__name__)

# Distinct str subtype for '/'-separated paths as they appear in a wheel's
# RECORD file, so they are not confused with native filesystem paths.
RecordPath = NewType("RecordPath", str)
# One RECORD row: (path, hash, size); size may arrive as int or str.
InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]:
    """Hash *path* with SHA-256 and return a RECORD-style (digest, size) pair.

    The digest is formatted ``sha256=<urlsafe-base64 without padding>`` as
    required by the wheel RECORD convention; the size is returned as a str.
    """
    hasher, size = hash_file(path, blocksize)
    encoded = urlsafe_b64encode(hasher.digest()).decode("latin1").rstrip("=")
    return "sha256=" + encoded, str(size)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def csv_io_kwargs(mode: str) -> Dict[str, Any]:
    """Return the keyword arguments for opening a RECORD CSV file in *mode*.

    The csv module requires files opened with ``newline=""``; RECORD files
    are always UTF-8.
    """
    return dict(mode=mode, newline="", encoding="utf-8")
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def fix_script(path: str) -> bool:
    """Rewrite a ``#!python`` shebang to point at the running interpreter.

    Return True if the file was changed, False when there was no
    ``#!python`` shebang to fix.
    """
    # XXX RECORD hashes will need to be updated
    assert os.path.isfile(path)

    with open(path, "rb") as script:
        shebang = script.readline()
        if not shebang.startswith(b"#!python"):
            return False
        interpreter = sys.executable.encode(sys.getfilesystemencoding())
        shebang = b"#!" + interpreter + os.linesep.encode("ascii")
        body = script.read()
    with open(path, "wb") as script:
        script.write(shebang)
        script.write(body)
    return True
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def wheel_root_is_purelib(metadata: Message) -> bool:
    """True when the WHEEL metadata marks the archive root as purelib."""
    flag = metadata.get("Root-Is-Purelib", "")
    return flag.lower() == "true"
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def get_entrypoints(dist: BaseDistribution) -> Tuple[Dict[str, str], Dict[str, str]]:
    """Split a distribution's entry points into (console_scripts, gui_scripts).

    Entry points in any other group are ignored.
    """
    console: Dict[str, str] = {}
    gui: Dict[str, str] = {}
    for ep in dist.iter_entry_points():
        if ep.group == "console_scripts":
            console[ep.name] = ep.value
        elif ep.group == "gui_scripts":
            gui[ep.name] = ep.value
    return console, gui
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
    """Determine if any scripts are not on PATH and format a warning.

    Returns a warning message if one or more scripts are not on PATH,
    otherwise None.
    """
    if not scripts:
        return None

    # Bucket the script basenames by the directory they were installed into.
    by_dir: Dict[str, Set[str]] = collections.defaultdict(set)
    for script_path in scripts:
        by_dir[os.path.dirname(script_path)].add(os.path.basename(script_path))

    # Directories already on PATH never trigger a warning.
    path_dirs = [
        os.path.normcase(os.path.normpath(entry)).rstrip(os.sep)
        for entry in os.environ.get("PATH", "").split(os.pathsep)
    ]
    # An executable that sits next to sys.executable is fine too: this covers
    # venv invocations without activating the venv.
    path_dirs.append(
        os.path.normcase(os.path.normpath(os.path.dirname(sys.executable)))
    )
    warn_for: Dict[str, Set[str]] = {
        directory: names
        for directory, names in by_dir.items()
        if os.path.normcase(os.path.normpath(directory)) not in path_dirs
    }
    if not warn_for:
        return None

    # One line per offending directory.
    msg_lines = []
    for directory, names in warn_for.items():
        ordered: List[str] = sorted(names)
        if len(ordered) == 1:
            start_text = f"script {ordered[0]} is"
        else:
            start_text = "scripts {} are".format(
                ", ".join(ordered[:-1]) + " and " + ordered[-1]
            )

        msg_lines.append(
            f"The {start_text} installed in '{directory}' which is not on PATH."
        )

    last_line_fmt = (
        "Consider adding {} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )
    if len(msg_lines) == 1:
        msg_lines.append(last_line_fmt.format("this directory"))
    else:
        msg_lines.append(last_line_fmt.format("these directories"))

    # Note any PATH entry starting with ~, which not every application expands.
    has_tilde_entry = any(
        entry[0] == "~" for entry in os.environ.get("PATH", "").split(os.pathsep) if entry
    )
    if has_tilde_entry:
        msg_lines.append(
            "NOTE: The current PATH contains path(s) starting with `~`, "
            "which may not be expanded by all applications."
        )

    return "\n".join(msg_lines)
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
def _normalized_outrows(
    outrows: Iterable[InstalledCSVRow],
) -> List[Tuple[str, str, str]]:
    """Normalize the given rows of a RECORD file.

    Each row is a 3-tuple (path, hash, size) per PEP 376 / PEP 427; the
    incoming size may be an int or a str.  Every element is coerced to str
    and the rows are sorted so the output is predictable for tests.

    Coercing the size matters for sorting as well: a path can occur more
    than once in the wild, in which case the sort falls back to comparing
    the second and third elements, and comparing int with str would raise
    TypeError (see https://github.com/pypa/pip/issues/5868).
    """
    stringified = ((path, digest, str(size)) for path, digest, size in outrows)
    return sorted(stringified)
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
def _record_to_fs_path(record_path: RecordPath, lib_dir: str) -> str:
    """Resolve a RECORD-relative path to a filesystem path under lib_dir."""
    return os.path.join(lib_dir, record_path)
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
def _fs_to_record_path(path: str, lib_dir: str) -> RecordPath:
    """Convert a filesystem path to a '/'-separated RECORD path.

    The path is made relative to ``lib_dir`` when possible; on Windows a
    path on a different logical disk cannot be expressed relatively and is
    kept absolute.
    """
    same_drive = (
        os.path.splitdrive(path)[0].lower() == os.path.splitdrive(lib_dir)[0].lower()
    )
    if same_drive:
        path = os.path.relpath(path, lib_dir)

    return cast("RecordPath", path.replace(os.path.sep, "/"))
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
def get_csv_rows_for_installed(
    old_csv_rows: List[List[str]],
    installed: Dict[RecordPath, RecordPath],
    changed: Set[RecordPath],
    generated: List[str],
    lib_dir: str,
) -> List[InstalledCSVRow]:
    """Build the rows for the installed distribution's RECORD file.

    :param installed: A map from archive RECORD path to installation RECORD
        path.
    """
    rows: List[InstalledCSVRow] = []

    # Carry over rows from the wheel's RECORD, translating archive paths to
    # their installed locations and re-hashing anything that was modified.
    for old_row in old_csv_rows:
        if len(old_row) > 3:
            logger.warning("RECORD line has more than three elements: %s", old_row)
        source_path = cast("RecordPath", old_row[0])
        # pop() so that whatever remains in `installed` afterwards is the
        # set of installed files that had no RECORD entry at all.
        dest_path = installed.pop(source_path, source_path)
        if dest_path in changed:
            digest, length = rehash(_record_to_fs_path(dest_path, lib_dir))
        else:
            digest = old_row[1] if len(old_row) > 1 else ""
            length = old_row[2] if len(old_row) > 2 else ""
        rows.append((dest_path, digest, length))

    # Files pip generated itself (script wrappers, INSTALLER, ...) get
    # freshly computed hashes.
    for generated_path in generated:
        digest, length = rehash(generated_path)
        rows.append((_fs_to_record_path(generated_path, lib_dir), digest, length))

    # Installed files the original RECORD never mentioned are recorded
    # without hash or size.
    rows.extend((leftover, "", "") for leftover in installed.values())
    return rows
|
| 271 |
+
|
| 272 |
+
|
| 273 |
+
def get_console_script_specs(console: Dict[str, str]) -> List[str]:
    """
    Given the mapping from entrypoint name to callable, return the relevant
    console script specs.
    """
    # Work on a copy so the caller's mapping is left untouched.
    console = console.copy()

    specs = []

    # pip and setuptools ship "versioned" entry points (pip2.7, pip3.3, ...)
    # baked into their wheel metadata at build time.  Installed under a
    # *different* Python those baked-in versions would be wrong, and a
    # correct metadata-level fix does not exist yet, so for these two
    # projects we drop the baked-in variants and regenerate the right ones
    # for the running interpreter.
    #
    # To support ensurepip, the ENSUREPIP_OPTIONS environment variable
    # controls which variants get written:
    #
    #   ENSUREPIP_OPTIONS=altinstall -> only pipX.Y and easy_install-X.Y
    #   ENSUREPIP_OPTIONS=install    -> pipX.Y and pipX (technically: any
    #                                   value other than "altinstall")
    #   unset (default)              -> pip, pipX, pipX.Y, easy_install
    #                                   and easy_install-X.Y
    pip_script = console.pop("pip", None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            specs.append("pip = " + pip_script)

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            specs.append(f"pip{sys.version_info[0]} = {pip_script}")

        specs.append(f"pip{get_major_minor_version()} = {pip_script}")
        # Drop any remaining versioned pip entry points from the wheel.
        for stale in [k for k in console if re.match(r"pip(\d+(\.\d+)?)?$", k)]:
            del console[stale]
    easy_install_script = console.pop("easy_install", None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            specs.append("easy_install = " + easy_install_script)

        specs.append(
            f"easy_install-{get_major_minor_version()} = {easy_install_script}"
        )
        # Drop any remaining versioned easy_install entry points.
        stale_eps = [k for k in console if re.match(r"easy_install(-\d+\.\d+)?$", k)]
        for stale in stale_eps:
            del console[stale]

    # Everything else is generated exactly as the wheel declared it.
    specs.extend(starmap("{} = {}".format, console.items()))

    return specs
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
class ZipBackedFile:
    """A File whose content is extracted on demand from an open wheel zip."""

    def __init__(
        self, src_record_path: RecordPath, dest_path: str, zip_file: ZipFile
    ) -> None:
        self.src_record_path = src_record_path
        self.dest_path = dest_path
        self._zip_file = zip_file
        self.changed = False

    def _getinfo(self) -> ZipInfo:
        """Look up this file's ZipInfo entry in the backing archive."""
        return self._zip_file.getinfo(self.src_record_path)

    def save(self) -> None:
        # Unlink rather than truncate any existing destination file.
        # Truncating in place can segfault the running process when it has
        # the old file mmap'd — e.g. a shared object loaded by pyopenssl
        # through its vendored urllib3 — because calling a symbol in the
        # truncated mapping faults.  Unlinking lets the process keep using
        # the old copy while the new contents are written.
        if os.path.exists(self.dest_path):
            os.unlink(self.dest_path)

        info = self._getinfo()

        # open() below creates the file either way, so for an empty archive
        # member the decompression step can be skipped entirely.
        with open(self.dest_path, "wb") as output:
            if info.file_size > 0:
                with self._zip_file.open(info) as member:
                    chunk_size = min(info.file_size, 1024 * 1024)
                    shutil.copyfileobj(member, output, chunk_size)

        if zip_item_is_executable(info):
            set_extracted_file_to_default_mode_plus_executable(self.dest_path)
|
| 384 |
+
|
| 385 |
+
|
| 386 |
+
class ScriptFile:
    """Wrap another File; after saving, rewrite any ``#!python`` shebang."""

    def __init__(self, file: "File") -> None:
        self._file = file
        self.src_record_path = file.src_record_path
        self.dest_path = file.dest_path
        self.changed = False

    def save(self) -> None:
        self._file.save()
        # fix_script() reports whether the shebang was rewritten; changed
        # files must have their RECORD hashes recomputed later.
        self.changed = fix_script(self.dest_path)
|
| 396 |
+
|
| 397 |
+
|
| 398 |
+
class MissingCallableSuffix(InstallationError):
    """Raised for a script entry point missing its ``module:attr`` callable part."""

    def __init__(self, entry_point: str) -> None:
        super().__init__(
            f"Invalid script entry point: {entry_point} - A callable "
            "suffix is required. Cf https://packaging.python.org/"
            "specifications/entry-points/#use-for-scripts for more "
            "information."
        )
|
| 406 |
+
|
| 407 |
+
|
| 408 |
+
def _raise_for_invalid_entrypoint(specification: str) -> None:
    """Reject entry point specs that parse but lack a callable suffix."""
    export_entry = get_export_entry(specification)
    if export_entry is not None and export_entry.suffix is None:
        raise MissingCallableSuffix(str(export_entry))
|
| 412 |
+
|
| 413 |
+
|
| 414 |
+
class PipScriptMaker(ScriptMaker):
    """A distlib ScriptMaker that rejects entry points with no callable suffix."""

    def make(
        self, specification: str, options: Optional[Dict[str, Any]] = None
    ) -> List[str]:
        # Validate before delegating to distlib: a parseable entry point
        # without a callable suffix would otherwise produce a broken script.
        _raise_for_invalid_entrypoint(specification)
        return super().make(specification, options)
|
| 420 |
+
|
| 421 |
+
|
| 422 |
+
def _install_wheel(  # noqa: C901, PLR0915 function is too long
    name: str,
    wheel_zip: ZipFile,
    wheel_path: str,
    scheme: Scheme,
    pycompile: bool = True,
    warn_script_location: bool = True,
    direct_url: Optional[DirectUrl] = None,
    requested: bool = False,
) -> None:
    """Install a wheel.

    :param name: Name of the project to install
    :param wheel_zip: open ZipFile for wheel being installed
    :param scheme: Distutils scheme dictating the install directories
    :param req_description: String used in place of the requirement, for
        logging
    :param pycompile: Whether to byte-compile installed Python files
    :param warn_script_location: Whether to check that scripts are installed
        into a directory on PATH
    :raises UnsupportedWheel:
        * when the directory holds an unpacked wheel with incompatible
          Wheel-Version
        * when the .dist-info dir does not match the wheel
    """
    info_dir, metadata = parse_wheel(wheel_zip, name)

    # Root-Is-Purelib in the WHEEL metadata decides which scheme directory
    # receives the archive root.
    if wheel_root_is_purelib(metadata):
        lib_dir = scheme.purelib
    else:
        lib_dir = scheme.platlib

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed: Dict[RecordPath, RecordPath] = {}
    changed: Set[RecordPath] = set()
    generated: List[str] = []

    def record_installed(
        srcfile: RecordPath, destfile: str, modified: bool = False
    ) -> None:
        """Map archive RECORD paths to installation RECORD paths."""
        newpath = _fs_to_record_path(destfile, lib_dir)
        installed[srcfile] = newpath
        if modified:
            changed.add(newpath)

    def is_dir_path(path: RecordPath) -> bool:
        # Zip entries for directories end with a slash.
        return path.endswith("/")

    def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None:
        # Defense against malicious wheels writing outside the install dirs.
        if not is_within_directory(dest_dir_path, target_path):
            message = (
                "The wheel {!r} has a file {!r} trying to install"
                " outside the target directory {!r}"
            )
            raise InstallationError(
                message.format(wheel_path, target_path, dest_dir_path)
            )

    def root_scheme_file_maker(
        zip_file: ZipFile, dest: str
    ) -> Callable[[RecordPath], "File"]:
        # Files at the wheel root install directly under the lib dir.
        def make_root_scheme_file(record_path: RecordPath) -> "File":
            normed_path = os.path.normpath(record_path)
            dest_path = os.path.join(dest, normed_path)
            assert_no_path_traversal(dest, dest_path)
            return ZipBackedFile(record_path, dest_path, zip_file)

        return make_root_scheme_file

    def data_scheme_file_maker(
        zip_file: ZipFile, scheme: Scheme
    ) -> Callable[[RecordPath], "File"]:
        # Files under <name>.data/<scheme key>/ install into that scheme dir.
        scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS}

        def make_data_scheme_file(record_path: RecordPath) -> "File":
            normed_path = os.path.normpath(record_path)
            try:
                _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
            except ValueError:
                message = (
                    f"Unexpected file in {wheel_path}: {record_path!r}. .data directory"
                    " contents should be named like: '<scheme key>/<path>'."
                )
                raise InstallationError(message)

            try:
                scheme_path = scheme_paths[scheme_key]
            except KeyError:
                valid_scheme_keys = ", ".join(sorted(scheme_paths))
                message = (
                    f"Unknown scheme key used in {wheel_path}: {scheme_key} "
                    f"(for file {record_path!r}). .data directory contents "
                    f"should be in subdirectories named with a valid scheme "
                    f"key ({valid_scheme_keys})"
                )
                raise InstallationError(message)

            dest_path = os.path.join(scheme_path, dest_subpath)
            assert_no_path_traversal(scheme_path, dest_path)
            return ZipBackedFile(record_path, dest_path, zip_file)

        return make_data_scheme_file

    def is_data_scheme_path(path: RecordPath) -> bool:
        return path.split("/", 1)[0].endswith(".data")

    paths = cast(List[RecordPath], wheel_zip.namelist())
    file_paths = filterfalse(is_dir_path, paths)
    root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths)

    make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir)
    files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths)

    def is_script_scheme_path(path: RecordPath) -> bool:
        parts = path.split("/", 2)
        return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts"

    other_scheme_paths, script_scheme_paths = partition(
        is_script_scheme_path, data_scheme_paths
    )

    make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme)
    other_scheme_files = map(make_data_scheme_file, other_scheme_paths)
    files = chain(files, other_scheme_files)

    # Get the defined entry points
    distribution = get_wheel_distribution(
        FilesystemWheel(wheel_path),
        canonicalize_name(name),
    )
    console, gui = get_entrypoints(distribution)

    def is_entrypoint_wrapper(file: "File") -> bool:
        # EP, EP.exe and EP-script.py are scripts generated for
        # entry point EP by setuptools
        path = file.dest_path
        name = os.path.basename(path)
        if name.lower().endswith(".exe"):
            matchname = name[:-4]
        elif name.lower().endswith("-script.py"):
            matchname = name[:-10]
        elif name.lower().endswith(".pya"):
            matchname = name[:-4]
        else:
            matchname = name
        # Ignore setuptools-generated scripts
        return matchname in console or matchname in gui

    # Script files from the wheel are wrapped so their shebangs get fixed;
    # setuptools-generated wrappers are dropped (pip regenerates them below).
    script_scheme_files: Iterator[File] = map(
        make_data_scheme_file, script_scheme_paths
    )
    script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files)
    script_scheme_files = map(ScriptFile, script_scheme_files)
    files = chain(files, script_scheme_files)

    existing_parents = set()
    for file in files:
        # directory creation is lazy and after file filtering
        # to ensure we don't install empty dirs; empty dirs can't be
        # uninstalled.
        parent_dir = os.path.dirname(file.dest_path)
        if parent_dir not in existing_parents:
            ensure_dir(parent_dir)
            existing_parents.add(parent_dir)
        file.save()
        record_installed(file.src_record_path, file.dest_path, file.changed)

    def pyc_source_file_paths() -> Generator[str, None, None]:
        # We de-duplicate installation paths, since there can be overlap (e.g.
        # file in .data maps to same location as file in wheel root).
        # Sorting installation paths makes it easier to reproduce and debug
        # issues related to permissions on existing files.
        for installed_path in sorted(set(installed.values())):
            full_installed_path = os.path.join(lib_dir, installed_path)
            if not os.path.isfile(full_installed_path):
                continue
            if not full_installed_path.endswith(".py"):
                continue
            yield full_installed_path

    def pyc_output_path(path: str) -> str:
        """Return the path the pyc file would have been written to."""
        return importlib.util.cache_from_source(path)

    # Compile all of the pyc files for the installed files
    if pycompile:
        with contextlib.redirect_stdout(
            StreamWrapper.from_stream(sys.stdout)
        ) as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore")
                for path in pyc_source_file_paths():
                    success = compileall.compile_file(path, force=True, quiet=True)
                    if success:
                        pyc_path = pyc_output_path(path)
                        assert os.path.exists(pyc_path)
                        pyc_record_path = cast(
                            "RecordPath", pyc_path.replace(os.path.sep, "/")
                        )
                        record_installed(pyc_record_path, pyc_path)
        logger.debug(stdout.getvalue())

    maker = PipScriptMaker(None, scheme.scripts)

    # Ensure old scripts are overwritten.
    # See https://github.com/pypa/pip/issues/1800
    maker.clobber = True

    # Ensure we don't generate any variants for scripts because this is almost
    # never what somebody wants.
    # See https://bitbucket.org/pypa/distlib/issue/35/
    maker.variants = {""}

    # This is required because otherwise distlib creates scripts that are not
    # executable.
    # See https://bitbucket.org/pypa/distlib/issue/32/
    maker.set_mode = True

    # Generate the console and GUI entry points specified in the wheel
    scripts_to_generate = get_console_script_specs(console)

    gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items()))

    generated_console_scripts = maker.make_multiple(scripts_to_generate)
    generated.extend(generated_console_scripts)

    generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True}))

    if warn_script_location:
        msg = message_about_scripts_not_on_PATH(generated_console_scripts)
        if msg is not None:
            logger.warning(msg)

    generated_file_mode = 0o666 & ~current_umask()

    @contextlib.contextmanager
    def _generate_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
        # Write via an adjacent temp file and atomically replace, so a
        # partially written metadata file is never observed.
        with adjacent_tmp_file(path, **kwargs) as f:
            yield f
        os.chmod(f.name, generated_file_mode)
        replace(f.name, path)

    dest_info_dir = os.path.join(lib_dir, info_dir)

    # Record pip as the installer
    installer_path = os.path.join(dest_info_dir, "INSTALLER")
    with _generate_file(installer_path) as installer_file:
        installer_file.write(b"pip\n")
    generated.append(installer_path)

    # Record the PEP 610 direct URL reference
    if direct_url is not None:
        direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
        with _generate_file(direct_url_path) as direct_url_file:
            direct_url_file.write(direct_url.to_json().encode("utf-8"))
        generated.append(direct_url_path)

    # Record the REQUESTED file
    if requested:
        requested_path = os.path.join(dest_info_dir, "REQUESTED")
        with open(requested_path, "wb"):
            pass
        generated.append(requested_path)

    record_text = distribution.read_text("RECORD")
    record_rows = list(csv.reader(record_text.splitlines()))

    rows = get_csv_rows_for_installed(
        record_rows,
        installed=installed,
        changed=changed,
        generated=generated,
        lib_dir=lib_dir,
    )

    # Record details of all files installed
    record_path = os.path.join(dest_info_dir, "RECORD")

    with _generate_file(record_path, **csv_io_kwargs("w")) as record_file:
        # Explicitly cast to typing.IO[str] as a workaround for the mypy error:
        # "writer" has incompatible type "BinaryIO"; expected "_Writer"
        writer = csv.writer(cast("IO[str]", record_file))
        writer.writerows(_normalized_outrows(rows))
|
| 709 |
+
|
| 710 |
+
|
| 711 |
+
@contextlib.contextmanager
def req_error_context(req_description: str) -> Generator[None, None, None]:
    """Re-raise any InstallationError with the requirement description prepended."""
    try:
        yield
    except InstallationError as e:
        # Keep the original as __cause__ while prefixing its message with
        # the requirement being installed, for clearer error reporting.
        message = f"For req: {req_description}. {e.args[0]}"
        raise InstallationError(message) from e
|
| 718 |
+
|
| 719 |
+
|
| 720 |
+
def install_wheel(
    name: str,
    wheel_path: str,
    scheme: Scheme,
    req_description: str,
    pycompile: bool = True,
    warn_script_location: bool = True,
    direct_url: Optional[DirectUrl] = None,
    requested: bool = False,
) -> None:
    """Open the wheel at ``wheel_path`` and install it under ``scheme``,
    wrapping any InstallationError with ``req_description`` for context."""
    with ZipFile(wheel_path, allowZip64=True) as z, req_error_context(
        req_description
    ):
        _install_wheel(
            name=name,
            wheel_zip=z,
            wheel_path=wheel_path,
            scheme=scheme,
            pycompile=pycompile,
            warn_script_location=warn_script_location,
            direct_url=direct_url,
            requested=requested,
        )
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/operations/prepare.py
ADDED
|
@@ -0,0 +1,732 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Prepares a distribution for installation
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
# The following comment should be removed at some point in the future.
|
| 5 |
+
# mypy: strict-optional=False
|
| 6 |
+
|
| 7 |
+
import mimetypes
|
| 8 |
+
import os
|
| 9 |
+
import shutil
|
| 10 |
+
from dataclasses import dataclass
|
| 11 |
+
from pathlib import Path
|
| 12 |
+
from typing import Dict, Iterable, List, Optional
|
| 13 |
+
|
| 14 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 15 |
+
|
| 16 |
+
from pip._internal.distributions import make_distribution_for_install_requirement
|
| 17 |
+
from pip._internal.distributions.installed import InstalledDistribution
|
| 18 |
+
from pip._internal.exceptions import (
|
| 19 |
+
DirectoryUrlHashUnsupported,
|
| 20 |
+
HashMismatch,
|
| 21 |
+
HashUnpinned,
|
| 22 |
+
InstallationError,
|
| 23 |
+
MetadataInconsistent,
|
| 24 |
+
NetworkConnectionError,
|
| 25 |
+
VcsHashUnsupported,
|
| 26 |
+
)
|
| 27 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 28 |
+
from pip._internal.metadata import BaseDistribution, get_metadata_distribution
|
| 29 |
+
from pip._internal.models.direct_url import ArchiveInfo
|
| 30 |
+
from pip._internal.models.link import Link
|
| 31 |
+
from pip._internal.models.wheel import Wheel
|
| 32 |
+
from pip._internal.network.download import BatchDownloader, Downloader
|
| 33 |
+
from pip._internal.network.lazy_wheel import (
|
| 34 |
+
HTTPRangeRequestUnsupported,
|
| 35 |
+
dist_from_wheel_url,
|
| 36 |
+
)
|
| 37 |
+
from pip._internal.network.session import PipSession
|
| 38 |
+
from pip._internal.operations.build.build_tracker import BuildTracker
|
| 39 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 40 |
+
from pip._internal.utils._log import getLogger
|
| 41 |
+
from pip._internal.utils.direct_url_helpers import (
|
| 42 |
+
direct_url_for_editable,
|
| 43 |
+
direct_url_from_link,
|
| 44 |
+
)
|
| 45 |
+
from pip._internal.utils.hashes import Hashes, MissingHashes
|
| 46 |
+
from pip._internal.utils.logging import indent_log
|
| 47 |
+
from pip._internal.utils.misc import (
|
| 48 |
+
display_path,
|
| 49 |
+
hash_file,
|
| 50 |
+
hide_url,
|
| 51 |
+
redact_auth_from_requirement,
|
| 52 |
+
)
|
| 53 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 54 |
+
from pip._internal.utils.unpacking import unpack_file
|
| 55 |
+
from pip._internal.vcs import vcs
|
| 56 |
+
|
| 57 |
+
logger = getLogger(__name__)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _get_prepared_distribution(
    req: InstallRequirement,
    build_tracker: BuildTracker,
    finder: PackageFinder,
    build_isolation: bool,
    check_build_deps: bool,
) -> BaseDistribution:
    """Prepare a distribution for installation."""
    dist_builder = make_distribution_for_install_requirement(req)
    tracker_id = dist_builder.build_tracker_id
    # Only requirements that need a build are tracked (tracker_id is None
    # for e.g. already-installed distributions).
    if tracker_id is not None:
        with build_tracker.track(req, tracker_id):
            dist_builder.prepare_distribution_metadata(
                finder, build_isolation, check_build_deps
            )
    return dist_builder.get_metadata_distribution()
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
    """Check out the VCS URL in *link* into *location*."""
    backend = vcs.get_backend_for_scheme(link.scheme)
    # The caller only passes VCS links, so a backend must exist.
    assert backend is not None
    backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
@dataclass
class File:
    """A local file together with its MIME content type."""

    # Filesystem path of the file.
    path: str
    # MIME type; when not supplied it is guessed from the filename.
    content_type: Optional[str] = None

    def __post_init__(self) -> None:
        if self.content_type is not None:
            return
        self.content_type = mimetypes.guess_type(self.path)[0]
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def get_http_url(
    link: Link,
    download: Downloader,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> File:
    """Fetch a remote link, reusing a valid previously downloaded copy if any."""
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)

    # Reuse an existing file from download_dir when present with a good hash.
    cached_path = None
    if download_dir:
        cached_path = _check_download_dir(link, download_dir, hashes)

    if cached_path:
        # Content type of a pre-existing download is unknown here.
        return File(cached_path, None)

    # Nothing usable on disk: download into the managed temp dir and verify.
    fetched_path, content_type = download(link, temp_dir.path)
    if hashes:
        hashes.check_against_path(fetched_path)
    return File(fetched_path, content_type)
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def get_file_url(
    link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None
) -> File:
    """Get file and optionally check its hash."""
    # Prefer a valid previously downloaded copy in download_dir.
    cached_path = None
    if download_dir:
        cached_path = _check_download_dir(link, download_dir, hashes)

    source_path = cached_path if cached_path else link.file_path

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(source_path)
    return File(source_path, None)
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def unpack_url(
    link: Link,
    location: str,
    download: Downloader,
    verbosity: int,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> Optional[File]:
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # Non-editable VCS links are checked out directly; nothing to return.
    if link.is_vcs:
        unpack_vcs_link(link, location, verbosity=verbosity)
        return None

    # Local directories are handled by the caller, never here.
    assert not link.is_existing_dir()

    # Obtain the archive: from disk for file: URLs, otherwise over HTTP.
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)
    else:
        file = get_http_url(
            link,
            download,
            download_dir,
            hashes=hashes,
        )

    # Unpack the archive into the build dir. Even download-only runs need
    # the sources unpacked to parse dependencies -- except for wheels.
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def _check_download_dir(
    link: Link,
    download_dir: str,
    hashes: Optional[Hashes],
    warn_on_hash_mismatch: bool = True,
) -> Optional[str]:
    """Check download_dir for previously downloaded file with correct hash

    If a correct file is found return its path else None
    """
    candidate = os.path.join(download_dir, link.filename)

    if not os.path.exists(candidate):
        return None

    # A file exists; it is only reusable if its hash checks out.
    logger.info("File was already downloaded %s", candidate)
    if not hashes:
        return candidate

    try:
        hashes.check_against_path(candidate)
    except HashMismatch:
        if warn_on_hash_mismatch:
            logger.warning(
                "Previously-downloaded file %s has bad hash. Re-downloading.",
                candidate,
            )
        # Discard the stale file so it gets fetched again.
        os.unlink(candidate)
        return None
    return candidate
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
class RequirementPreparer:
|
| 216 |
+
"""Prepares a Requirement"""
|
| 217 |
+
|
| 218 |
+
    def __init__(
        self,
        build_dir: str,
        download_dir: Optional[str],
        src_dir: str,
        build_isolation: bool,
        check_build_deps: bool,
        build_tracker: BuildTracker,
        session: PipSession,
        progress_bar: str,
        finder: PackageFinder,
        require_hashes: bool,
        use_user_site: bool,
        lazy_wheel: bool,
        verbosity: int,
        legacy_resolver: bool,
    ) -> None:
        """Store preparation settings and construct the download helpers."""
        super().__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir
        self.build_tracker = build_tracker
        self._session = session
        # Single-link and batched downloaders share the session/progress bar.
        self._download = Downloader(session, progress_bar)
        self._batch_download = BatchDownloader(session, progress_bar)
        self.finder = finder

        # Where still-packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Is build isolation allowed?
        self.build_isolation = build_isolation

        # Should check build dependencies?
        self.check_build_deps = check_build_deps

        # Should hash-checking be required?
        self.require_hashes = require_hashes

        # Should install in user site-packages?
        self.use_user_site = use_user_site

        # Should wheels be downloaded lazily?
        self.use_lazy_wheel = lazy_wheel

        # How verbose should underlying tooling be?
        self.verbosity = verbosity

        # Are we using the legacy resolver?
        self.legacy_resolver = legacy_resolver

        # Memoized downloaded files, as mapping of url: path.
        self._downloaded: Dict[str, str] = {}

        # Previous "header" printed for a link-based InstallRequirement
        self._previous_requirement_header = ("", "")
|
| 275 |
+
|
| 276 |
+
def _log_preparing_link(self, req: InstallRequirement) -> None:
|
| 277 |
+
"""Provide context for the requirement being prepared."""
|
| 278 |
+
if req.link.is_file and not req.is_wheel_from_cache:
|
| 279 |
+
message = "Processing %s"
|
| 280 |
+
information = str(display_path(req.link.file_path))
|
| 281 |
+
else:
|
| 282 |
+
message = "Collecting %s"
|
| 283 |
+
information = redact_auth_from_requirement(req.req) if req.req else str(req)
|
| 284 |
+
|
| 285 |
+
# If we used req.req, inject requirement source if available (this
|
| 286 |
+
# would already be included if we used req directly)
|
| 287 |
+
if req.req and req.comes_from:
|
| 288 |
+
if isinstance(req.comes_from, str):
|
| 289 |
+
comes_from: Optional[str] = req.comes_from
|
| 290 |
+
else:
|
| 291 |
+
comes_from = req.comes_from.from_path()
|
| 292 |
+
if comes_from:
|
| 293 |
+
information += f" (from {comes_from})"
|
| 294 |
+
|
| 295 |
+
if (message, information) != self._previous_requirement_header:
|
| 296 |
+
self._previous_requirement_header = (message, information)
|
| 297 |
+
logger.info(message, information)
|
| 298 |
+
|
| 299 |
+
if req.is_wheel_from_cache:
|
| 300 |
+
with indent_log():
|
| 301 |
+
logger.info("Using cached %s", req.link.filename)
|
| 302 |
+
|
| 303 |
+
def _ensure_link_req_src_dir(
|
| 304 |
+
self, req: InstallRequirement, parallel_builds: bool
|
| 305 |
+
) -> None:
|
| 306 |
+
"""Ensure source_dir of a linked InstallRequirement."""
|
| 307 |
+
# Since source_dir is only set for editable requirements.
|
| 308 |
+
if req.link.is_wheel:
|
| 309 |
+
# We don't need to unpack wheels, so no need for a source
|
| 310 |
+
# directory.
|
| 311 |
+
return
|
| 312 |
+
assert req.source_dir is None
|
| 313 |
+
if req.link.is_existing_dir():
|
| 314 |
+
# build local directories in-tree
|
| 315 |
+
req.source_dir = req.link.file_path
|
| 316 |
+
return
|
| 317 |
+
|
| 318 |
+
# We always delete unpacked sdists after pip runs.
|
| 319 |
+
req.ensure_has_source_dir(
|
| 320 |
+
self.build_dir,
|
| 321 |
+
autodelete=True,
|
| 322 |
+
parallel_builds=parallel_builds,
|
| 323 |
+
)
|
| 324 |
+
req.ensure_pristine_source_checkout()
|
| 325 |
+
|
| 326 |
+
def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
|
| 327 |
+
# By the time this is called, the requirement's link should have
|
| 328 |
+
# been checked so we can tell what kind of requirements req is
|
| 329 |
+
# and raise some more informative errors than otherwise.
|
| 330 |
+
# (For example, we can raise VcsHashUnsupported for a VCS URL
|
| 331 |
+
# rather than HashMissing.)
|
| 332 |
+
if not self.require_hashes:
|
| 333 |
+
return req.hashes(trust_internet=True)
|
| 334 |
+
|
| 335 |
+
# We could check these first 2 conditions inside unpack_url
|
| 336 |
+
# and save repetition of conditions, but then we would
|
| 337 |
+
# report less-useful error messages for unhashable
|
| 338 |
+
# requirements, complaining that there's no hash provided.
|
| 339 |
+
if req.link.is_vcs:
|
| 340 |
+
raise VcsHashUnsupported()
|
| 341 |
+
if req.link.is_existing_dir():
|
| 342 |
+
raise DirectoryUrlHashUnsupported()
|
| 343 |
+
|
| 344 |
+
# Unpinned packages are asking for trouble when a new version
|
| 345 |
+
# is uploaded. This isn't a security check, but it saves users
|
| 346 |
+
# a surprising hash mismatch in the future.
|
| 347 |
+
# file:/// URLs aren't pinnable, so don't complain about them
|
| 348 |
+
# not being pinned.
|
| 349 |
+
if not req.is_direct and not req.is_pinned:
|
| 350 |
+
raise HashUnpinned()
|
| 351 |
+
|
| 352 |
+
# If known-good hashes are missing for this requirement,
|
| 353 |
+
# shim it with a facade object that will provoke hash
|
| 354 |
+
# computation and then raise a HashMissing exception
|
| 355 |
+
# showing the user what the hash should be.
|
| 356 |
+
return req.hashes(trust_internet=False) or MissingHashes()
|
| 357 |
+
|
| 358 |
+
def _fetch_metadata_only(
|
| 359 |
+
self,
|
| 360 |
+
req: InstallRequirement,
|
| 361 |
+
) -> Optional[BaseDistribution]:
|
| 362 |
+
if self.legacy_resolver:
|
| 363 |
+
logger.debug(
|
| 364 |
+
"Metadata-only fetching is not used in the legacy resolver",
|
| 365 |
+
)
|
| 366 |
+
return None
|
| 367 |
+
if self.require_hashes:
|
| 368 |
+
logger.debug(
|
| 369 |
+
"Metadata-only fetching is not used as hash checking is required",
|
| 370 |
+
)
|
| 371 |
+
return None
|
| 372 |
+
# Try PEP 658 metadata first, then fall back to lazy wheel if unavailable.
|
| 373 |
+
return self._fetch_metadata_using_link_data_attr(
|
| 374 |
+
req
|
| 375 |
+
) or self._fetch_metadata_using_lazy_wheel(req.link)
|
| 376 |
+
|
| 377 |
+
def _fetch_metadata_using_link_data_attr(
|
| 378 |
+
self,
|
| 379 |
+
req: InstallRequirement,
|
| 380 |
+
) -> Optional[BaseDistribution]:
|
| 381 |
+
"""Fetch metadata from the data-dist-info-metadata attribute, if possible."""
|
| 382 |
+
# (1) Get the link to the metadata file, if provided by the backend.
|
| 383 |
+
metadata_link = req.link.metadata_link()
|
| 384 |
+
if metadata_link is None:
|
| 385 |
+
return None
|
| 386 |
+
assert req.req is not None
|
| 387 |
+
logger.verbose(
|
| 388 |
+
"Obtaining dependency information for %s from %s",
|
| 389 |
+
req.req,
|
| 390 |
+
metadata_link,
|
| 391 |
+
)
|
| 392 |
+
# (2) Download the contents of the METADATA file, separate from the dist itself.
|
| 393 |
+
metadata_file = get_http_url(
|
| 394 |
+
metadata_link,
|
| 395 |
+
self._download,
|
| 396 |
+
hashes=metadata_link.as_hashes(),
|
| 397 |
+
)
|
| 398 |
+
with open(metadata_file.path, "rb") as f:
|
| 399 |
+
metadata_contents = f.read()
|
| 400 |
+
# (3) Generate a dist just from those file contents.
|
| 401 |
+
metadata_dist = get_metadata_distribution(
|
| 402 |
+
metadata_contents,
|
| 403 |
+
req.link.filename,
|
| 404 |
+
req.req.name,
|
| 405 |
+
)
|
| 406 |
+
# (4) Ensure the Name: field from the METADATA file matches the name from the
|
| 407 |
+
# install requirement.
|
| 408 |
+
#
|
| 409 |
+
# NB: raw_name will fall back to the name from the install requirement if
|
| 410 |
+
# the Name: field is not present, but it's noted in the raw_name docstring
|
| 411 |
+
# that that should NEVER happen anyway.
|
| 412 |
+
if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name):
|
| 413 |
+
raise MetadataInconsistent(
|
| 414 |
+
req, "Name", req.req.name, metadata_dist.raw_name
|
| 415 |
+
)
|
| 416 |
+
return metadata_dist
|
| 417 |
+
|
| 418 |
+
def _fetch_metadata_using_lazy_wheel(
|
| 419 |
+
self,
|
| 420 |
+
link: Link,
|
| 421 |
+
) -> Optional[BaseDistribution]:
|
| 422 |
+
"""Fetch metadata using lazy wheel, if possible."""
|
| 423 |
+
# --use-feature=fast-deps must be provided.
|
| 424 |
+
if not self.use_lazy_wheel:
|
| 425 |
+
return None
|
| 426 |
+
if link.is_file or not link.is_wheel:
|
| 427 |
+
logger.debug(
|
| 428 |
+
"Lazy wheel is not used as %r does not point to a remote wheel",
|
| 429 |
+
link,
|
| 430 |
+
)
|
| 431 |
+
return None
|
| 432 |
+
|
| 433 |
+
wheel = Wheel(link.filename)
|
| 434 |
+
name = canonicalize_name(wheel.name)
|
| 435 |
+
logger.info(
|
| 436 |
+
"Obtaining dependency information from %s %s",
|
| 437 |
+
name,
|
| 438 |
+
wheel.version,
|
| 439 |
+
)
|
| 440 |
+
url = link.url.split("#", 1)[0]
|
| 441 |
+
try:
|
| 442 |
+
return dist_from_wheel_url(name, url, self._session)
|
| 443 |
+
except HTTPRangeRequestUnsupported:
|
| 444 |
+
logger.debug("%s does not support range requests", url)
|
| 445 |
+
return None
|
| 446 |
+
|
| 447 |
+
def _complete_partial_requirements(
|
| 448 |
+
self,
|
| 449 |
+
partially_downloaded_reqs: Iterable[InstallRequirement],
|
| 450 |
+
parallel_builds: bool = False,
|
| 451 |
+
) -> None:
|
| 452 |
+
"""Download any requirements which were only fetched by metadata."""
|
| 453 |
+
# Download to a temporary directory. These will be copied over as
|
| 454 |
+
# needed for downstream 'download', 'wheel', and 'install' commands.
|
| 455 |
+
temp_dir = TempDirectory(kind="unpack", globally_managed=True).path
|
| 456 |
+
|
| 457 |
+
# Map each link to the requirement that owns it. This allows us to set
|
| 458 |
+
# `req.local_file_path` on the appropriate requirement after passing
|
| 459 |
+
# all the links at once into BatchDownloader.
|
| 460 |
+
links_to_fully_download: Dict[Link, InstallRequirement] = {}
|
| 461 |
+
for req in partially_downloaded_reqs:
|
| 462 |
+
assert req.link
|
| 463 |
+
links_to_fully_download[req.link] = req
|
| 464 |
+
|
| 465 |
+
batch_download = self._batch_download(
|
| 466 |
+
links_to_fully_download.keys(),
|
| 467 |
+
temp_dir,
|
| 468 |
+
)
|
| 469 |
+
for link, (filepath, _) in batch_download:
|
| 470 |
+
logger.debug("Downloading link %s to %s", link, filepath)
|
| 471 |
+
req = links_to_fully_download[link]
|
| 472 |
+
# Record the downloaded file path so wheel reqs can extract a Distribution
|
| 473 |
+
# in .get_dist().
|
| 474 |
+
req.local_file_path = filepath
|
| 475 |
+
# Record that the file is downloaded so we don't do it again in
|
| 476 |
+
# _prepare_linked_requirement().
|
| 477 |
+
self._downloaded[req.link.url] = filepath
|
| 478 |
+
|
| 479 |
+
# If this is an sdist, we need to unpack it after downloading, but the
|
| 480 |
+
# .source_dir won't be set up until we are in _prepare_linked_requirement().
|
| 481 |
+
# Add the downloaded archive to the install requirement to unpack after
|
| 482 |
+
# preparing the source dir.
|
| 483 |
+
if not req.is_wheel:
|
| 484 |
+
req.needs_unpacked_archive(Path(filepath))
|
| 485 |
+
|
| 486 |
+
# This step is necessary to ensure all lazy wheels are processed
|
| 487 |
+
# successfully by the 'download', 'wheel', and 'install' commands.
|
| 488 |
+
for req in partially_downloaded_reqs:
|
| 489 |
+
self._prepare_linked_requirement(req, parallel_builds)
|
| 490 |
+
|
| 491 |
+
    def prepare_linked_requirement(
        self, req: InstallRequirement, parallel_builds: bool = False
    ) -> BaseDistribution:
        """Prepare a requirement to be obtained from req.link."""
        assert req.link
        self._log_preparing_link(req)
        with indent_log():
            # Check if the relevant file is already available
            # in the download directory
            file_path = None
            if self.download_dir is not None and req.link.is_wheel:
                hashes = self._get_linked_req_hashes(req)
                file_path = _check_download_dir(
                    req.link,
                    self.download_dir,
                    hashes,
                    # When a locally built wheel has been found in cache, we don't warn
                    # about re-downloading when the already downloaded wheel hash does
                    # not match. This is because the hash must be checked against the
                    # original link, not the cached link. It that case the already
                    # downloaded file will be removed and re-fetched from cache (which
                    # implies a hash check against the cache entry's origin.json).
                    warn_on_hash_mismatch=not req.is_wheel_from_cache,
                )

            if file_path is not None:
                # The file is already available, so mark it as downloaded
                self._downloaded[req.link.url] = file_path
            else:
                # The file is not available, attempt to fetch only metadata
                metadata_dist = self._fetch_metadata_only(req)
                if metadata_dist is not None:
                    # Defer the full download/build; it is finished later by
                    # prepare_linked_requirements_more().
                    req.needs_more_preparation = True
                    return metadata_dist

            # None of the optimizations worked, fully prepare the requirement
            return self._prepare_linked_requirement(req, parallel_builds)
|
| 528 |
+
|
| 529 |
+
def prepare_linked_requirements_more(
|
| 530 |
+
self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
|
| 531 |
+
) -> None:
|
| 532 |
+
"""Prepare linked requirements more, if needed."""
|
| 533 |
+
reqs = [req for req in reqs if req.needs_more_preparation]
|
| 534 |
+
for req in reqs:
|
| 535 |
+
# Determine if any of these requirements were already downloaded.
|
| 536 |
+
if self.download_dir is not None and req.link.is_wheel:
|
| 537 |
+
hashes = self._get_linked_req_hashes(req)
|
| 538 |
+
file_path = _check_download_dir(req.link, self.download_dir, hashes)
|
| 539 |
+
if file_path is not None:
|
| 540 |
+
self._downloaded[req.link.url] = file_path
|
| 541 |
+
req.needs_more_preparation = False
|
| 542 |
+
|
| 543 |
+
# Prepare requirements we found were already downloaded for some
|
| 544 |
+
# reason. The other downloads will be completed separately.
|
| 545 |
+
partially_downloaded_reqs: List[InstallRequirement] = []
|
| 546 |
+
for req in reqs:
|
| 547 |
+
if req.needs_more_preparation:
|
| 548 |
+
partially_downloaded_reqs.append(req)
|
| 549 |
+
else:
|
| 550 |
+
self._prepare_linked_requirement(req, parallel_builds)
|
| 551 |
+
|
| 552 |
+
# TODO: separate this part out from RequirementPreparer when the v1
|
| 553 |
+
# resolver can be removed!
|
| 554 |
+
self._complete_partial_requirements(
|
| 555 |
+
partially_downloaded_reqs,
|
| 556 |
+
parallel_builds=parallel_builds,
|
| 557 |
+
)
|
| 558 |
+
|
| 559 |
+
    def _prepare_linked_requirement(
        self, req: InstallRequirement, parallel_builds: bool
    ) -> BaseDistribution:
        """Fully obtain (download/unpack/build) a linked requirement and
        return its prepared distribution."""
        assert req.link
        link = req.link

        hashes = self._get_linked_req_hashes(req)

        if hashes and req.is_wheel_from_cache:
            assert req.download_info is not None
            assert link.is_wheel
            assert link.is_file
            # We need to verify hashes, and we have found the requirement in the cache
            # of locally built wheels.
            if (
                isinstance(req.download_info.info, ArchiveInfo)
                and req.download_info.info.hashes
                and hashes.has_one_of(req.download_info.info.hashes)
            ):
                # At this point we know the requirement was built from a hashable source
                # artifact, and we verified that the cache entry's hash of the original
                # artifact matches one of the hashes we expect. We don't verify hashes
                # against the cached wheel, because the wheel is not the original.
                hashes = None
            else:
                logger.warning(
                    "The hashes of the source archive found in cache entry "
                    "don't match, ignoring cached built wheel "
                    "and re-downloading source."
                )
                # Fall back to the original source link; the cached wheel is
                # not usable under the requested hashes.
                req.link = req.cached_wheel_source_link
                link = req.link

        self._ensure_link_req_src_dir(req, parallel_builds)

        if link.is_existing_dir():
            # Local directory: nothing to download.
            local_file = None
        elif link.url not in self._downloaded:
            try:
                local_file = unpack_url(
                    link,
                    req.source_dir,
                    self._download,
                    self.verbosity,
                    self.download_dir,
                    hashes,
                )
            except NetworkConnectionError as exc:
                raise InstallationError(
                    f"Could not install requirement {req} because of HTTP "
                    f"error {exc} for URL {link}"
                )
        else:
            # Already downloaded earlier (e.g. by the batch downloader);
            # still verify hashes against the file on disk.
            file_path = self._downloaded[link.url]
            if hashes:
                hashes.check_against_path(file_path)
            local_file = File(file_path, content_type=None)

        # If download_info is set, we got it from the wheel cache.
        if req.download_info is None:
            # Editables don't go through this function (see
            # prepare_editable_requirement).
            assert not req.editable
            req.download_info = direct_url_from_link(link, req.source_dir)
            # Make sure we have a hash in download_info. If we got it as part of the
            # URL, it will have been verified and we can rely on it. Otherwise we
            # compute it from the downloaded file.
            # FIXME: https://github.com/pypa/pip/issues/11943
            if (
                isinstance(req.download_info.info, ArchiveInfo)
                and not req.download_info.info.hashes
                and local_file
            ):
                hash = hash_file(local_file.path)[0].hexdigest()
                # We populate info.hash for backward compatibility.
                # This will automatically populate info.hashes.
                req.download_info.info.hash = f"sha256={hash}"

        # For use in later processing,
        # preserve the file path on the requirement.
        if local_file:
            req.local_file_path = local_file.path

        dist = _get_prepared_distribution(
            req,
            self.build_tracker,
            self.finder,
            self.build_isolation,
            self.check_build_deps,
        )
        return dist
|
| 650 |
+
|
| 651 |
+
def save_linked_requirement(self, req: InstallRequirement) -> None:
|
| 652 |
+
assert self.download_dir is not None
|
| 653 |
+
assert req.link is not None
|
| 654 |
+
link = req.link
|
| 655 |
+
if link.is_vcs or (link.is_existing_dir() and req.editable):
|
| 656 |
+
# Make a .zip of the source_dir we already created.
|
| 657 |
+
req.archive(self.download_dir)
|
| 658 |
+
return
|
| 659 |
+
|
| 660 |
+
if link.is_existing_dir():
|
| 661 |
+
logger.debug(
|
| 662 |
+
"Not copying link to destination directory "
|
| 663 |
+
"since it is a directory: %s",
|
| 664 |
+
link,
|
| 665 |
+
)
|
| 666 |
+
return
|
| 667 |
+
if req.local_file_path is None:
|
| 668 |
+
# No distribution was downloaded for this requirement.
|
| 669 |
+
return
|
| 670 |
+
|
| 671 |
+
download_location = os.path.join(self.download_dir, link.filename)
|
| 672 |
+
if not os.path.exists(download_location):
|
| 673 |
+
shutil.copy(req.local_file_path, download_location)
|
| 674 |
+
download_path = display_path(download_location)
|
| 675 |
+
logger.info("Saved %s", download_path)
|
| 676 |
+
|
| 677 |
+
def prepare_editable_requirement(
|
| 678 |
+
self,
|
| 679 |
+
req: InstallRequirement,
|
| 680 |
+
) -> BaseDistribution:
|
| 681 |
+
"""Prepare an editable requirement."""
|
| 682 |
+
assert req.editable, "cannot prepare a non-editable req as editable"
|
| 683 |
+
|
| 684 |
+
logger.info("Obtaining %s", req)
|
| 685 |
+
|
| 686 |
+
with indent_log():
|
| 687 |
+
if self.require_hashes:
|
| 688 |
+
raise InstallationError(
|
| 689 |
+
f"The editable requirement {req} cannot be installed when "
|
| 690 |
+
"requiring hashes, because there is no single file to "
|
| 691 |
+
"hash."
|
| 692 |
+
)
|
| 693 |
+
req.ensure_has_source_dir(self.src_dir)
|
| 694 |
+
req.update_editable()
|
| 695 |
+
assert req.source_dir
|
| 696 |
+
req.download_info = direct_url_for_editable(req.unpacked_source_directory)
|
| 697 |
+
|
| 698 |
+
dist = _get_prepared_distribution(
|
| 699 |
+
req,
|
| 700 |
+
self.build_tracker,
|
| 701 |
+
self.finder,
|
| 702 |
+
self.build_isolation,
|
| 703 |
+
self.check_build_deps,
|
| 704 |
+
)
|
| 705 |
+
|
| 706 |
+
req.check_if_exists(self.use_user_site)
|
| 707 |
+
|
| 708 |
+
return dist
|
| 709 |
+
|
| 710 |
+
def prepare_installed_requirement(
|
| 711 |
+
self,
|
| 712 |
+
req: InstallRequirement,
|
| 713 |
+
skip_reason: str,
|
| 714 |
+
) -> BaseDistribution:
|
| 715 |
+
"""Prepare an already-installed requirement."""
|
| 716 |
+
assert req.satisfied_by, "req should have been satisfied but isn't"
|
| 717 |
+
assert skip_reason is not None, (
|
| 718 |
+
"did not get skip reason skipped but req.satisfied_by "
|
| 719 |
+
f"is set to {req.satisfied_by}"
|
| 720 |
+
)
|
| 721 |
+
logger.info(
|
| 722 |
+
"Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
|
| 723 |
+
)
|
| 724 |
+
with indent_log():
|
| 725 |
+
if self.require_hashes:
|
| 726 |
+
logger.debug(
|
| 727 |
+
"Since it is already installed, we are trusting this "
|
| 728 |
+
"package without checking its hash. To ensure a "
|
| 729 |
+
"completely repeatable environment, install into an "
|
| 730 |
+
"empty virtualenv."
|
| 731 |
+
)
|
| 732 |
+
return InstalledDistribution(req).get_metadata_distribution()
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/req/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (2.32 kB). View file
|
|
|
evalkit_llava/lib/python3.10/site-packages/pip/_internal/req/__pycache__/constructors.cpython-310.pyc
ADDED
|
Binary file (13.9 kB). View file
|
|
|