diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a89a838b9a5cb264e9ae9d269fbedca6e2d6333
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py
@@ -0,0 +1,21 @@
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.distributions.sdist import SourceDistribution
+from pip._internal.distributions.wheel import WheelDistribution
+from pip._internal.req.req_install import InstallRequirement
+
+
+def make_distribution_for_install_requirement(
+ install_req: InstallRequirement,
+) -> AbstractDistribution:
+ """Returns a Distribution for the given InstallRequirement"""
+ # Editable requirements will always be source distributions. They use the
+ # legacy logic until we create a modern standard for them.
+ if install_req.editable:
+ return SourceDistribution(install_req)
+
+ # If it's a wheel, it's a WheelDistribution
+ if install_req.is_wheel:
+ return WheelDistribution(install_req)
+
+ # Otherwise, a SourceDistribution
+ return SourceDistribution(install_req)
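
A minimal usage sketch of the dispatch above. It assumes pip's internal `install_req_from_line` helper from `pip._internal.req.constructors`; like everything under `pip._internal`, this is not a stable API.

```python
# Sketch only: pip's _internal API is not a stable interface.
from pip._internal.distributions import make_distribution_for_install_requirement
from pip._internal.req.constructors import install_req_from_line  # assumed helper

req = install_req_from_line("requests")  # no link resolved yet, so not a wheel
dist = make_distribution_for_install_requirement(req)
print(type(dist).__name__)  # SourceDistribution
```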
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..11074be79a058ed5cbf76573f52c104536542061
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2325d340b0c75cc8f8b0597051bbfe85485da382
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e2f6661032e632b3ad29e634b135cc3f14a134be
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bbabbf12a89350529ba72ef6c2d400c710fed761
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0a544eec96fb94f4370c695b2f1ba1f2a8d752fa
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/distributions/base.py b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e4d0c91a901c46ab20be813af083cd19809318a
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/base.py
@@ -0,0 +1,53 @@
+import abc
+from typing import TYPE_CHECKING, Optional
+
+from pip._internal.metadata.base import BaseDistribution
+from pip._internal.req import InstallRequirement
+
+if TYPE_CHECKING:
+ from pip._internal.index.package_finder import PackageFinder
+
+
+class AbstractDistribution(metaclass=abc.ABCMeta):
+ """A base class for handling installable artifacts.
+
+ The requirements for anything installable are as follows:
+
+ - we must be able to determine the requirement name
+ (or we can't correctly handle the non-upgrade case).
+
+ - for packages with setup requirements, we must also be able
+ to determine their requirements without installing additional
+ packages (for the same reason as run-time dependencies)
+
+ - we must be able to create a Distribution object exposing the
+ above metadata.
+
+ - if we need to do work in the build tracker, we must be able to generate a unique
+ string to identify the requirement in the build tracker.
+ """
+
+ def __init__(self, req: InstallRequirement) -> None:
+ super().__init__()
+ self.req = req
+
+ @abc.abstractproperty
+ def build_tracker_id(self) -> Optional[str]:
+ """A string that uniquely identifies this requirement to the build tracker.
+
+ If None, then this dist has no work to do in the build tracker, and
+ ``.prepare_distribution_metadata()`` will not be called."""
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def get_metadata_distribution(self) -> BaseDistribution:
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def prepare_distribution_metadata(
+ self,
+ finder: "PackageFinder",
+ build_isolation: bool,
+ check_build_deps: bool,
+ ) -> None:
+ raise NotImplementedError()
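
To illustrate the contract, here is a hypothetical no-op subclass (not part of pip) satisfying all three abstract members; `InstalledDistribution` and `WheelDistribution` below are the real trivial cases.

```python
# Hypothetical subclass, for illustration only.
from typing import TYPE_CHECKING, Optional

from pip._internal.distributions.base import AbstractDistribution
from pip._internal.metadata.base import BaseDistribution

if TYPE_CHECKING:
    from pip._internal.index.package_finder import PackageFinder


class NoOpDistribution(AbstractDistribution):
    """A distribution whose metadata needs no preparation at all."""

    @property
    def build_tracker_id(self) -> Optional[str]:
        return None  # no build-tracker work, so prepare_* is never called

    def get_metadata_distribution(self) -> BaseDistribution:
        return self.req.get_dist()  # assumes metadata is already available

    def prepare_distribution_metadata(
        self,
        finder: "PackageFinder",
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        pass  # nothing to do
```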
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/distributions/installed.py b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/installed.py
new file mode 100644
index 0000000000000000000000000000000000000000..ab8d53be7408626719c27aa29fdc2e143b7c380a
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/installed.py
@@ -0,0 +1,29 @@
+from typing import Optional
+
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
+
+
+class InstalledDistribution(AbstractDistribution):
+ """Represents an installed package.
+
+ This does not need any preparation as the required information has already
+ been computed.
+ """
+
+ @property
+ def build_tracker_id(self) -> Optional[str]:
+ return None
+
+ def get_metadata_distribution(self) -> BaseDistribution:
+ assert self.req.satisfied_by is not None, "not actually installed"
+ return self.req.satisfied_by
+
+ def prepare_distribution_metadata(
+ self,
+ finder: PackageFinder,
+ build_isolation: bool,
+ check_build_deps: bool,
+ ) -> None:
+ pass
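
A usage sketch: `satisfied_by` is populated by `InstallRequirement.check_if_exists()`, so wiring this class up looks roughly like the following (pip itself serves as a package that is certainly installed; the helper import is an assumption about pip's internals).

```python
# Sketch assuming pip's internal check_if_exists(use_user_site) signature.
from pip._internal.distributions.installed import InstalledDistribution
from pip._internal.req.constructors import install_req_from_line  # assumed helper

req = install_req_from_line("pip")
req.check_if_exists(use_user_site=False)  # sets req.satisfied_by if installed
if req.satisfied_by is not None:
    dist = InstalledDistribution(req).get_metadata_distribution()
    print(dist.version)
```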
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py
new file mode 100644
index 0000000000000000000000000000000000000000..28ea5cea16cdf9b740809553cbf2d3bf8d626e1e
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py
@@ -0,0 +1,158 @@
+import logging
+from typing import TYPE_CHECKING, Iterable, Optional, Set, Tuple
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.exceptions import InstallationError
+from pip._internal.metadata import BaseDistribution
+from pip._internal.utils.subprocess import runner_with_spinner_message
+
+if TYPE_CHECKING:
+ from pip._internal.index.package_finder import PackageFinder
+
+logger = logging.getLogger(__name__)
+
+
+class SourceDistribution(AbstractDistribution):
+ """Represents a source distribution.
+
+ The preparation step for these needs metadata for the packages to be
+ generated, either using PEP 517 or using the legacy `setup.py egg_info`.
+ """
+
+ @property
+ def build_tracker_id(self) -> Optional[str]:
+ """Identify this requirement uniquely by its link."""
+ assert self.req.link
+ return self.req.link.url_without_fragment
+
+ def get_metadata_distribution(self) -> BaseDistribution:
+ return self.req.get_dist()
+
+ def prepare_distribution_metadata(
+ self,
+ finder: "PackageFinder",
+ build_isolation: bool,
+ check_build_deps: bool,
+ ) -> None:
+ # Load pyproject.toml, to determine whether PEP 517 is to be used
+ self.req.load_pyproject_toml()
+
+ # Set up the build isolation, if this requirement should be isolated
+ should_isolate = self.req.use_pep517 and build_isolation
+ if should_isolate:
+ # Setup an isolated environment and install the build backend static
+ # requirements in it.
+ self._prepare_build_backend(finder)
+ # Check that if the requirement is editable, it either supports PEP 660 or
+ # has a setup.py or a setup.cfg. This cannot be done earlier because we need
+ # to set up the build backend to verify it supports build_editable, nor can
+ # it be done later, because we want to avoid installing build requirements
+ # needlessly. Doing it here also works around setuptools generating
+ # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
+ # without setup.py or setup.cfg.
+ self.req.isolated_editable_sanity_check()
+ # Install the dynamic build requirements.
+ self._install_build_reqs(finder)
+ # Check if the current environment provides build dependencies
+ should_check_deps = self.req.use_pep517 and check_build_deps
+ if should_check_deps:
+ pyproject_requires = self.req.pyproject_requires
+ assert pyproject_requires is not None
+ conflicting, missing = self.req.build_env.check_requirements(
+ pyproject_requires
+ )
+ if conflicting:
+ self._raise_conflicts("the backend dependencies", conflicting)
+ if missing:
+ self._raise_missing_reqs(missing)
+ self.req.prepare_metadata()
+
+ def _prepare_build_backend(self, finder: "PackageFinder") -> None:
+ # Isolate in a BuildEnvironment and install the build-time
+ # requirements.
+ pyproject_requires = self.req.pyproject_requires
+ assert pyproject_requires is not None
+
+ self.req.build_env = BuildEnvironment()
+ self.req.build_env.install_requirements(
+ finder, pyproject_requires, "overlay", kind="build dependencies"
+ )
+ conflicting, missing = self.req.build_env.check_requirements(
+ self.req.requirements_to_check
+ )
+ if conflicting:
+ self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
+ if missing:
+ logger.warning(
+ "Missing build requirements in pyproject.toml for %s.",
+ self.req,
+ )
+ logger.warning(
+ "The project does not specify a build backend, and "
+ "pip cannot fall back to setuptools without %s.",
+ " and ".join(map(repr, sorted(missing))),
+ )
+
+ def _get_build_requires_wheel(self) -> Iterable[str]:
+ with self.req.build_env:
+ runner = runner_with_spinner_message("Getting requirements to build wheel")
+ backend = self.req.pep517_backend
+ assert backend is not None
+ with backend.subprocess_runner(runner):
+ return backend.get_requires_for_build_wheel()
+
+ def _get_build_requires_editable(self) -> Iterable[str]:
+ with self.req.build_env:
+ runner = runner_with_spinner_message(
+ "Getting requirements to build editable"
+ )
+ backend = self.req.pep517_backend
+ assert backend is not None
+ with backend.subprocess_runner(runner):
+ return backend.get_requires_for_build_editable()
+
+ def _install_build_reqs(self, finder: "PackageFinder") -> None:
+ # Install any extra build dependencies that the backend requests.
+ # This must be done in a second pass, as the pyproject.toml
+ # dependencies must be installed before we can call the backend.
+ if (
+ self.req.editable
+ and self.req.permit_editable_wheels
+ and self.req.supports_pyproject_editable
+ ):
+ build_reqs = self._get_build_requires_editable()
+ else:
+ build_reqs = self._get_build_requires_wheel()
+ conflicting, missing = self.req.build_env.check_requirements(build_reqs)
+ if conflicting:
+ self._raise_conflicts("the backend dependencies", conflicting)
+ self.req.build_env.install_requirements(
+ finder, missing, "normal", kind="backend dependencies"
+ )
+
+ def _raise_conflicts(
+ self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
+ ) -> None:
+ format_string = (
+ "Some build dependencies for {requirement} "
+ "conflict with {conflicting_with}: {description}."
+ )
+ error_message = format_string.format(
+ requirement=self.req,
+ conflicting_with=conflicting_with,
+ description=", ".join(
+ f"{installed} is incompatible with {wanted}"
+ for installed, wanted in sorted(conflicting_reqs)
+ ),
+ )
+ raise InstallationError(error_message)
+
+ def _raise_missing_reqs(self, missing: Set[str]) -> None:
+ format_string = (
+ "Some build dependencies for {requirement} are missing: {missing}."
+ )
+ error_message = format_string.format(
+ requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
+ )
+ raise InstallationError(error_message)
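
The dynamic pass in `_install_build_reqs` ultimately calls the backend's `get_requires_for_build_wheel` hook. A standalone sketch of that hook call, using the vendored `pyproject_hooks` directly; the project path and backend name are placeholders for whatever `pyproject.toml` declares.

```python
# Sketch: querying a PEP 517 backend for its dynamic build requirements.
from pip._vendor.pyproject_hooks import BuildBackendHookCaller

hooks = BuildBackendHookCaller(
    source_dir="path/to/project",           # placeholder project directory
    build_backend="setuptools.build_meta",  # whatever pyproject.toml declares
)
print(hooks.get_requires_for_build_wheel())  # e.g. [] or ["wheel"]
```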
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..bfadd39dcb77dfdaa2cca24e8a6db7e5beac181e
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py
@@ -0,0 +1,42 @@
+from typing import TYPE_CHECKING, Optional
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.metadata import (
+ BaseDistribution,
+ FilesystemWheel,
+ get_wheel_distribution,
+)
+
+if TYPE_CHECKING:
+ from pip._internal.index.package_finder import PackageFinder
+
+
+class WheelDistribution(AbstractDistribution):
+ """Represents a wheel distribution.
+
+ This does not need any preparation as wheels can be directly unpacked.
+ """
+
+ @property
+ def build_tracker_id(self) -> Optional[str]:
+ return None
+
+ def get_metadata_distribution(self) -> BaseDistribution:
+ """Loads the metadata from the wheel file into memory and returns a
+ Distribution that uses it, not relying on the wheel file or
+ requirement.
+ """
+ assert self.req.local_file_path, "Set as part of preparation during download"
+ assert self.req.name, "Wheels are never unnamed"
+ wheel = FilesystemWheel(self.req.local_file_path)
+ return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
+
+ def prepare_distribution_metadata(
+ self,
+ finder: "PackageFinder",
+ build_isolation: bool,
+ check_build_deps: bool,
+ ) -> None:
+ pass
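
A sketch of the same lookup outside of `WheelDistribution`, given a local wheel file; the path and project name are placeholders, and the helpers are the ones this module already imports.

```python
# Sketch: read metadata straight out of a wheel on disk.
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.metadata import FilesystemWheel, get_wheel_distribution

wheel = FilesystemWheel("dist/example_pkg-1.0-py3-none-any.whl")  # placeholder
dist = get_wheel_distribution(wheel, canonicalize_name("example-pkg"))
print(dist.version)
```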
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2780ef6616404b51d7e5c687633cb11d6698435f
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e7f9a6430efb973b4fefec8d44de6d94d8340219
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..80a494a90c4b1e8d5e525f8be29e96b1ea0f7d69
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cfddecab8119b93808e647880cc6f2b71a8af49d
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3cb753fce50f99d661a04bc1e8495d6b16f760bc
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8b2e98661547a201c25e79d810f52d12839253ce
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bd4a489d6ea6fbadc24615f810fa159788c29686
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..aa24cbf941b55335e4ed2f01991018977ca2e6e9
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..341e3ffe57925dc84d3175c2321a737d90f13dfc
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fc7d050dcdafde86f6b2ffc8d89365dc81c2a4a8
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/models/direct_url.py b/vllm/lib/python3.10/site-packages/pip/_internal/models/direct_url.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc5ec8d4aa9b02b7264f7a5a0222e7e1fe215ad0
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/models/direct_url.py
@@ -0,0 +1,224 @@
+""" PEP 610 """
+
+import json
+import re
+import urllib.parse
+from dataclasses import dataclass
+from typing import Any, ClassVar, Dict, Iterable, Optional, Type, TypeVar, Union
+
+__all__ = [
+ "DirectUrl",
+ "DirectUrlValidationError",
+ "DirInfo",
+ "ArchiveInfo",
+ "VcsInfo",
+]
+
+T = TypeVar("T")
+
+DIRECT_URL_METADATA_NAME = "direct_url.json"
+ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")
+
+
+class DirectUrlValidationError(Exception):
+ pass
+
+
+def _get(
+ d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+) -> Optional[T]:
+ """Get value from dictionary and verify expected type."""
+ if key not in d:
+ return default
+ value = d[key]
+ if not isinstance(value, expected_type):
+ raise DirectUrlValidationError(
+ f"{value!r} has unexpected type for {key} (expected {expected_type})"
+ )
+ return value
+
+
+def _get_required(
+ d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+) -> T:
+ value = _get(d, expected_type, key, default)
+ if value is None:
+ raise DirectUrlValidationError(f"{key} must have a value")
+ return value
+
+
+def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":
+ infos = [info for info in infos if info is not None]
+ if not infos:
+ raise DirectUrlValidationError(
+ "missing one of archive_info, dir_info, vcs_info"
+ )
+ if len(infos) > 1:
+ raise DirectUrlValidationError(
+ "more than one of archive_info, dir_info, vcs_info"
+ )
+ assert infos[0] is not None
+ return infos[0]
+
+
+def _filter_none(**kwargs: Any) -> Dict[str, Any]:
+ """Make dict excluding None values."""
+ return {k: v for k, v in kwargs.items() if v is not None}
+
+
+@dataclass
+class VcsInfo:
+ name: ClassVar = "vcs_info"
+
+ vcs: str
+ commit_id: str
+ requested_revision: Optional[str] = None
+
+ @classmethod
+ def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
+ if d is None:
+ return None
+ return cls(
+ vcs=_get_required(d, str, "vcs"),
+ commit_id=_get_required(d, str, "commit_id"),
+ requested_revision=_get(d, str, "requested_revision"),
+ )
+
+ def _to_dict(self) -> Dict[str, Any]:
+ return _filter_none(
+ vcs=self.vcs,
+ requested_revision=self.requested_revision,
+ commit_id=self.commit_id,
+ )
+
+
+class ArchiveInfo:
+ name = "archive_info"
+
+ def __init__(
+ self,
+ hash: Optional[str] = None,
+ hashes: Optional[Dict[str, str]] = None,
+ ) -> None:
+ # set hashes before hash, since the hash setter will further populate hashes
+ self.hashes = hashes
+ self.hash = hash
+
+ @property
+ def hash(self) -> Optional[str]:
+ return self._hash
+
+ @hash.setter
+ def hash(self, value: Optional[str]) -> None:
+ if value is not None:
+ # Auto-populate the hashes key to upgrade to the new format automatically.
+ # We don't back-populate the legacy hash key from hashes.
+ try:
+ hash_name, hash_value = value.split("=", 1)
+ except ValueError:
+ raise DirectUrlValidationError(
+ f"invalid archive_info.hash format: {value!r}"
+ )
+ if self.hashes is None:
+ self.hashes = {hash_name: hash_value}
+ elif hash_name not in self.hashes:
+ self.hashes = self.hashes.copy()
+ self.hashes[hash_name] = hash_value
+ self._hash = value
+
+ @classmethod
+ def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
+ if d is None:
+ return None
+ return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes"))
+
+ def _to_dict(self) -> Dict[str, Any]:
+ return _filter_none(hash=self.hash, hashes=self.hashes)
+
+
+@dataclass
+class DirInfo:
+ name: ClassVar = "dir_info"
+
+ editable: bool = False
+
+ @classmethod
+ def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
+ if d is None:
+ return None
+ return cls(editable=_get_required(d, bool, "editable", default=False))
+
+ def _to_dict(self) -> Dict[str, Any]:
+ return _filter_none(editable=self.editable or None)
+
+
+InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]
+
+
+@dataclass
+class DirectUrl:
+ url: str
+ info: InfoType
+ subdirectory: Optional[str] = None
+
+ def _remove_auth_from_netloc(self, netloc: str) -> str:
+ if "@" not in netloc:
+ return netloc
+ user_pass, netloc_no_user_pass = netloc.split("@", 1)
+ if (
+ isinstance(self.info, VcsInfo)
+ and self.info.vcs == "git"
+ and user_pass == "git"
+ ):
+ return netloc
+ if ENV_VAR_RE.match(user_pass):
+ return netloc
+ return netloc_no_user_pass
+
+ @property
+ def redacted_url(self) -> str:
+ """url with user:password part removed unless it is formed with
+ environment variables as specified in PEP 610, or it is ``git``
+ in the case of a git URL.
+ """
+ purl = urllib.parse.urlsplit(self.url)
+ netloc = self._remove_auth_from_netloc(purl.netloc)
+ surl = urllib.parse.urlunsplit(
+ (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
+ )
+ return surl
+
+ def validate(self) -> None:
+ self.from_dict(self.to_dict())
+
+ @classmethod
+ def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
+ return DirectUrl(
+ url=_get_required(d, str, "url"),
+ subdirectory=_get(d, str, "subdirectory"),
+ info=_exactly_one_of(
+ [
+ ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
+ DirInfo._from_dict(_get(d, dict, "dir_info")),
+ VcsInfo._from_dict(_get(d, dict, "vcs_info")),
+ ]
+ ),
+ )
+
+ def to_dict(self) -> Dict[str, Any]:
+ res = _filter_none(
+ url=self.redacted_url,
+ subdirectory=self.subdirectory,
+ )
+ res[self.info.name] = self.info._to_dict()
+ return res
+
+ @classmethod
+ def from_json(cls, s: str) -> "DirectUrl":
+ return cls.from_dict(json.loads(s))
+
+ def to_json(self) -> str:
+ return json.dumps(self.to_dict(), sort_keys=True)
+
+ def is_local_editable(self) -> bool:
+ return isinstance(self.info, DirInfo) and self.info.editable
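
A round-trip sketch for the PEP 610 model above: parse a `direct_url.json` payload, then serialize it back. Note that `to_dict()` goes through `redacted_url`, so the auth part of the URL is dropped on output.

```python
from pip._internal.models.direct_url import ArchiveInfo, DirectUrl

doc = (
    '{"url": "https://user:secret@example.com/pkg.tar.gz",'
    ' "archive_info": {"hashes": {"sha256": "0123abcd"}}}'
)
direct_url = DirectUrl.from_json(doc)
assert isinstance(direct_url.info, ArchiveInfo)
print(direct_url.to_json())
# {"archive_info": {"hashes": {"sha256": "0123abcd"}}, "url": "https://example.com/pkg.tar.gz"}
```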
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/models/link.py b/vllm/lib/python3.10/site-packages/pip/_internal/models/link.py
new file mode 100644
index 0000000000000000000000000000000000000000..27ad016090c565af4375d9a236d363c2be62532c
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/models/link.py
@@ -0,0 +1,604 @@
+import functools
+import itertools
+import logging
+import os
+import posixpath
+import re
+import urllib.parse
+from dataclasses import dataclass
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ List,
+ Mapping,
+ NamedTuple,
+ Optional,
+ Tuple,
+ Union,
+)
+
+from pip._internal.utils.deprecation import deprecated
+from pip._internal.utils.filetypes import WHEEL_EXTENSION
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.misc import (
+ pairwise,
+ redact_auth_from_url,
+ split_auth_from_netloc,
+ splitext,
+)
+from pip._internal.utils.urls import path_to_url, url_to_path
+
+if TYPE_CHECKING:
+ from pip._internal.index.collector import IndexContent
+
+logger = logging.getLogger(__name__)
+
+
+# Order matters, earlier hashes have a precedence over later hashes for what
+# we will pick to use.
+_SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
+
+
+@dataclass(frozen=True)
+class LinkHash:
+ """Links to content may have embedded hash values. This class parses those.
+
+ `name` must be any member of `_SUPPORTED_HASHES`.
+
+ This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to
+ be JSON-serializable to conform to PEP 610, this class contains the logic for
+ parsing a hash name and value for correctness, and then checking whether that hash
+ conforms to a schema with `.is_hash_allowed()`."""
+
+ name: str
+ value: str
+
+ _hash_url_fragment_re = re.compile(
+ # NB: we do not validate that the second group (.*) is a valid hex
+ # digest. Instead, we simply keep that string in this class, and then check it
+ # against Hashes when hash-checking is needed. This is easier to debug than
+ # proactively discarding an invalid hex digest, as we handle incorrect hashes
+ # and malformed hashes in the same place.
+ r"[#&]({choices})=([^&]*)".format(
+ choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
+ ),
+ )
+
+ def __post_init__(self) -> None:
+ assert self.name in _SUPPORTED_HASHES
+
+ @classmethod
+ @functools.lru_cache(maxsize=None)
+ def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
+ """Search a string for a checksum algorithm name and encoded output value."""
+ match = cls._hash_url_fragment_re.search(url)
+ if match is None:
+ return None
+ name, value = match.groups()
+ return cls(name=name, value=value)
+
+ def as_dict(self) -> Dict[str, str]:
+ return {self.name: self.value}
+
+ def as_hashes(self) -> Hashes:
+ """Return a Hashes instance which checks only for the current hash."""
+ return Hashes({self.name: [self.value]})
+
+ def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
+ """
+ Return True if the current hash is allowed by `hashes`.
+ """
+ if hashes is None:
+ return False
+ return hashes.is_hash_allowed(self.name, hex_digest=self.value)
+
+
+@dataclass(frozen=True)
+class MetadataFile:
+ """Information about a core metadata file associated with a distribution."""
+
+ hashes: Optional[Dict[str, str]]
+
+ def __post_init__(self) -> None:
+ if self.hashes is not None:
+ assert all(name in _SUPPORTED_HASHES for name in self.hashes)
+
+
+def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
+ # Remove any unsupported hash types from the mapping. If this leaves no
+ # supported hashes, return None
+ if hashes is None:
+ return None
+ hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
+ if not hashes:
+ return None
+ return hashes
+
+
+def _clean_url_path_part(part: str) -> str:
+ """
+ Clean a "part" of a URL path (i.e. after splitting on "@" characters).
+ """
+ # We unquote prior to quoting to make sure nothing is double quoted.
+ return urllib.parse.quote(urllib.parse.unquote(part))
+
+
+def _clean_file_url_path(part: str) -> str:
+ """
+ Clean the first part of a URL path that corresponds to a local
+ filesystem path (i.e. the first part after splitting on "@" characters).
+ """
+ # We unquote prior to quoting to make sure nothing is double quoted.
+ # Also, on Windows the path part might contain a drive letter which
+ # should not be quoted. On Linux where drive letters do not
+ # exist, the colon should be quoted. We rely on urllib.request
+ # to do the right thing here.
+ return urllib.request.pathname2url(urllib.request.url2pathname(part))
+
+
+# Reserved characters: "@" and "%2F" (the percent-encoded "/").
+_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
+
+
+def _clean_url_path(path: str, is_local_path: bool) -> str:
+ """
+ Clean the path portion of a URL.
+ """
+ if is_local_path:
+ clean_func = _clean_file_url_path
+ else:
+ clean_func = _clean_url_path_part
+
+ # Split on the reserved characters prior to cleaning so that
+ # revision strings in VCS URLs are properly preserved.
+ parts = _reserved_chars_re.split(path)
+
+ cleaned_parts = []
+ for to_clean, reserved in pairwise(itertools.chain(parts, [""])):
+ cleaned_parts.append(clean_func(to_clean))
+ # Normalize %xx escapes (e.g. %2f -> %2F)
+ cleaned_parts.append(reserved.upper())
+
+ return "".join(cleaned_parts)
+
+
+def _ensure_quoted_url(url: str) -> str:
+ """
+ Make sure a link is fully quoted.
+ For example, if ' ' occurs in the URL, it will be replaced with "%20",
+ without double-quoting other characters.
+ """
+ # Split the URL into parts according to the general structure
+ # `scheme://netloc/path?query#fragment`.
+ result = urllib.parse.urlsplit(url)
+ # If the netloc is empty, then the URL refers to a local filesystem path.
+ is_local_path = not result.netloc
+ path = _clean_url_path(result.path, is_local_path=is_local_path)
+ return urllib.parse.urlunsplit(result._replace(path=path))
+
+
+def _absolute_link_url(base_url: str, url: str) -> str:
+ """
+ A faster implementation of urllib.parse.urljoin with a shortcut
+ for absolute http/https URLs.
+ """
+ if url.startswith(("https://", "http://")):
+ return url
+ else:
+ return urllib.parse.urljoin(base_url, url)
+
+
+@functools.total_ordering
+class Link:
+ """Represents a parsed link from a Package Index's simple URL"""
+
+ __slots__ = [
+ "_parsed_url",
+ "_url",
+ "_path",
+ "_hashes",
+ "comes_from",
+ "requires_python",
+ "yanked_reason",
+ "metadata_file_data",
+ "cache_link_parsing",
+ "egg_fragment",
+ ]
+
+ def __init__(
+ self,
+ url: str,
+ comes_from: Optional[Union[str, "IndexContent"]] = None,
+ requires_python: Optional[str] = None,
+ yanked_reason: Optional[str] = None,
+ metadata_file_data: Optional[MetadataFile] = None,
+ cache_link_parsing: bool = True,
+ hashes: Optional[Mapping[str, str]] = None,
+ ) -> None:
+ """
+ :param url: url of the resource pointed to (href of the link)
+ :param comes_from: instance of IndexContent where the link was found,
+ or string.
+ :param requires_python: String containing the `Requires-Python`
+ metadata field, specified in PEP 345. This may be specified by
+ a data-requires-python attribute in the HTML link tag, as
+ described in PEP 503.
+ :param yanked_reason: the reason the file has been yanked, if the
+ file has been yanked, or None if the file hasn't been yanked.
+ This is the value of the "data-yanked" attribute, if present, in
+ a simple repository HTML link. If the file has been yanked but
+ no reason was provided, this should be the empty string. See
+ PEP 592 for more information and the specification.
+ :param metadata_file_data: the metadata attached to the file, or None if
+ no such metadata is provided. This argument, if not None, indicates
+ that a separate metadata file exists, and also optionally supplies
+ hashes for that file.
+ :param cache_link_parsing: A flag that is used elsewhere to determine
+ whether resources retrieved from this link should be cached. PyPI
+ URLs should generally have this set to False, for example.
+ :param hashes: A mapping of hash names to digests to allow us to
+ determine the validity of a download.
+ """
+
+ # The comes_from, requires_python, and metadata_file_data arguments are
+ # only used by classmethods of this class, and are not used in client
+ # code directly.
+
+ # The URL can be a UNC Windows share.
+ if url.startswith("\\\\"):
+ url = path_to_url(url)
+
+ self._parsed_url = urllib.parse.urlsplit(url)
+ # Store the url as a private attribute to prevent accidentally
+ # trying to set a new value.
+ self._url = url
+ # The .path property is hot, so calculate its value ahead of time.
+ self._path = urllib.parse.unquote(self._parsed_url.path)
+
+ link_hash = LinkHash.find_hash_url_fragment(url)
+ hashes_from_link = {} if link_hash is None else link_hash.as_dict()
+ if hashes is None:
+ self._hashes = hashes_from_link
+ else:
+ self._hashes = {**hashes, **hashes_from_link}
+
+ self.comes_from = comes_from
+ self.requires_python = requires_python if requires_python else None
+ self.yanked_reason = yanked_reason
+ self.metadata_file_data = metadata_file_data
+
+ self.cache_link_parsing = cache_link_parsing
+ self.egg_fragment = self._egg_fragment()
+
+ @classmethod
+ def from_json(
+ cls,
+ file_data: Dict[str, Any],
+ page_url: str,
+ ) -> Optional["Link"]:
+ """
+ Convert a PyPI JSON file entry from a simple repository page into a Link.
+ """
+ file_url = file_data.get("url")
+ if file_url is None:
+ return None
+
+ url = _ensure_quoted_url(_absolute_link_url(page_url, file_url))
+ pyrequire = file_data.get("requires-python")
+ yanked_reason = file_data.get("yanked")
+ hashes = file_data.get("hashes", {})
+
+ # PEP 714: Indexes must use the name core-metadata, but
+ # clients should support the old name as a fallback for compatibility.
+ metadata_info = file_data.get("core-metadata")
+ if metadata_info is None:
+ metadata_info = file_data.get("dist-info-metadata")
+
+ # The metadata info value may be a boolean, or a dict of hashes.
+ if isinstance(metadata_info, dict):
+ # The file exists, and hashes have been supplied
+ metadata_file_data = MetadataFile(supported_hashes(metadata_info))
+ elif metadata_info:
+ # The file exists, but there are no hashes
+ metadata_file_data = MetadataFile(None)
+ else:
+ # False or not present: the file does not exist
+ metadata_file_data = None
+
+ # The Link.yanked_reason expects an empty string instead of a boolean.
+ if yanked_reason and not isinstance(yanked_reason, str):
+ yanked_reason = ""
+ # The Link.yanked_reason expects None instead of False.
+ elif not yanked_reason:
+ yanked_reason = None
+
+ return cls(
+ url,
+ comes_from=page_url,
+ requires_python=pyrequire,
+ yanked_reason=yanked_reason,
+ hashes=hashes,
+ metadata_file_data=metadata_file_data,
+ )
+
+ @classmethod
+ def from_element(
+ cls,
+ anchor_attribs: Dict[str, Optional[str]],
+ page_url: str,
+ base_url: str,
+ ) -> Optional["Link"]:
+ """
+ Convert an anchor element's attributes in a simple repository page to a Link.
+ """
+ href = anchor_attribs.get("href")
+ if not href:
+ return None
+
+ url = _ensure_quoted_url(_absolute_link_url(base_url, href))
+ pyrequire = anchor_attribs.get("data-requires-python")
+ yanked_reason = anchor_attribs.get("data-yanked")
+
+ # PEP 714: Indexes must use the name data-core-metadata, but
+ # clients should support the old name as a fallback for compatibility.
+ metadata_info = anchor_attribs.get("data-core-metadata")
+ if metadata_info is None:
+ metadata_info = anchor_attribs.get("data-dist-info-metadata")
+ # The metadata info value may be the string "true", or a string of
+ # the form "hashname=hashval"
+ if metadata_info == "true":
+ # The file exists, but there are no hashes
+ metadata_file_data = MetadataFile(None)
+ elif metadata_info is None:
+ # The file does not exist
+ metadata_file_data = None
+ else:
+ # The file exists, and hashes have been supplied
+ hashname, sep, hashval = metadata_info.partition("=")
+ if sep == "=":
+ metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
+ else:
+ # Error - data is wrong. Treat as no hashes supplied.
+ logger.debug(
+ "Index returned invalid data-dist-info-metadata value: %s",
+ metadata_info,
+ )
+ metadata_file_data = MetadataFile(None)
+
+ return cls(
+ url,
+ comes_from=page_url,
+ requires_python=pyrequire,
+ yanked_reason=yanked_reason,
+ metadata_file_data=metadata_file_data,
+ )
+
+ def __str__(self) -> str:
+ if self.requires_python:
+ rp = f" (requires-python:{self.requires_python})"
+ else:
+ rp = ""
+ if self.comes_from:
+ return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
+ else:
+ return redact_auth_from_url(str(self._url))
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __hash__(self) -> int:
+ return hash(self.url)
+
+ def __eq__(self, other: Any) -> bool:
+ if not isinstance(other, Link):
+ return NotImplemented
+ return self.url == other.url
+
+ def __lt__(self, other: Any) -> bool:
+ if not isinstance(other, Link):
+ return NotImplemented
+ return self.url < other.url
+
+ @property
+ def url(self) -> str:
+ return self._url
+
+ @property
+ def filename(self) -> str:
+ path = self.path.rstrip("/")
+ name = posixpath.basename(path)
+ if not name:
+ # Make sure we don't leak auth information if the netloc
+ # includes a username and password.
+ netloc, user_pass = split_auth_from_netloc(self.netloc)
+ return netloc
+
+ name = urllib.parse.unquote(name)
+ assert name, f"URL {self._url!r} produced no filename"
+ return name
+
+ @property
+ def file_path(self) -> str:
+ return url_to_path(self.url)
+
+ @property
+ def scheme(self) -> str:
+ return self._parsed_url.scheme
+
+ @property
+ def netloc(self) -> str:
+ """
+ This can contain auth information.
+ """
+ return self._parsed_url.netloc
+
+ @property
+ def path(self) -> str:
+ return self._path
+
+ def splitext(self) -> Tuple[str, str]:
+ return splitext(posixpath.basename(self.path.rstrip("/")))
+
+ @property
+ def ext(self) -> str:
+ return self.splitext()[1]
+
+ @property
+ def url_without_fragment(self) -> str:
+ scheme, netloc, path, query, fragment = self._parsed_url
+ return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
+
+ _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
+
+ # Per PEP 508.
+ _project_name_re = re.compile(
+ r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
+ )
+
+ def _egg_fragment(self) -> Optional[str]:
+ match = self._egg_fragment_re.search(self._url)
+ if not match:
+ return None
+
+ # An egg fragment looks like a PEP 508 project name, along with
+ # an optional extras specifier. Anything else is invalid.
+ project_name = match.group(1)
+ if not self._project_name_re.match(project_name):
+ deprecated(
+ reason=f"{self} contains an egg fragment with a non-PEP 508 name.",
+ replacement="to use the req @ url syntax, and remove the egg fragment",
+ gone_in="25.1",
+ issue=13157,
+ )
+
+ return project_name
+
+ _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
+
+ @property
+ def subdirectory_fragment(self) -> Optional[str]:
+ match = self._subdirectory_fragment_re.search(self._url)
+ if not match:
+ return None
+ return match.group(1)
+
+ def metadata_link(self) -> Optional["Link"]:
+ """Return a link to the associated core metadata file (if any)."""
+ if self.metadata_file_data is None:
+ return None
+ metadata_url = f"{self.url_without_fragment}.metadata"
+ if self.metadata_file_data.hashes is None:
+ return Link(metadata_url)
+ return Link(metadata_url, hashes=self.metadata_file_data.hashes)
+
+ def as_hashes(self) -> Hashes:
+ return Hashes({k: [v] for k, v in self._hashes.items()})
+
+ @property
+ def hash(self) -> Optional[str]:
+ return next(iter(self._hashes.values()), None)
+
+ @property
+ def hash_name(self) -> Optional[str]:
+ return next(iter(self._hashes), None)
+
+ @property
+ def show_url(self) -> str:
+ return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])
+
+ @property
+ def is_file(self) -> bool:
+ return self.scheme == "file"
+
+ def is_existing_dir(self) -> bool:
+ return self.is_file and os.path.isdir(self.file_path)
+
+ @property
+ def is_wheel(self) -> bool:
+ return self.ext == WHEEL_EXTENSION
+
+ @property
+ def is_vcs(self) -> bool:
+ from pip._internal.vcs import vcs
+
+ return self.scheme in vcs.all_schemes
+
+ @property
+ def is_yanked(self) -> bool:
+ return self.yanked_reason is not None
+
+ @property
+ def has_hash(self) -> bool:
+ return bool(self._hashes)
+
+ def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
+ """
+ Return True if the link has a hash and it is allowed by `hashes`.
+ """
+ if hashes is None:
+ return False
+ return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())
+
+
+class _CleanResult(NamedTuple):
+ """Convert link for equivalency check.
+
+ This is used in the resolver to check whether two URL-specified requirements
+ likely point to the same distribution and can be considered equivalent. This
+ equivalency logic avoids comparing URLs literally, which can be too strict
+ (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users.
+
+ Currently this does three things:
+
+ 1. Drop the basic auth part. This is technically wrong since a server can
+ serve different content based on auth, but if it does that, it is even
+ impossible to guarantee two URLs without auth are equivalent, since
+ the user can input different auth information when prompted. So the
+ practical solution is to assume the auth doesn't affect the response.
+ 2. Parse the query to avoid the ordering issue. Note that the ordering of
+ values under the same key in the query is NOT cleaned; i.e. "a=1&a=2" and
+ "a=2&a=1" are still considered different.
+ 3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
+ hash values, since it should have no impact on the downloaded content. Note
+ that this drops the "egg=" part historically used to denote the requested
+ project (and extras), which is wrong in the strictest sense, but too many
+ people supply it inconsistently, causing superfluous resolution conflicts,
+ so we choose to ignore it as well.
+ """
+
+ parsed: urllib.parse.SplitResult
+ query: Dict[str, List[str]]
+ subdirectory: str
+ hashes: Dict[str, str]
+
+
+def _clean_link(link: Link) -> _CleanResult:
+ parsed = link._parsed_url
+ netloc = parsed.netloc.rsplit("@", 1)[-1]
+ # According to RFC 8089, an empty host in file: means localhost.
+ if parsed.scheme == "file" and not netloc:
+ netloc = "localhost"
+ fragment = urllib.parse.parse_qs(parsed.fragment)
+ if "egg" in fragment:
+ logger.debug("Ignoring egg= fragment in %s", link)
+ try:
+ # If there are multiple subdirectory values, use the first one.
+ # This matches the behavior of Link.subdirectory_fragment.
+ subdirectory = fragment["subdirectory"][0]
+ except (IndexError, KeyError):
+ subdirectory = ""
+ # If there are multiple hash values under the same algorithm, use the
+ # first one. This matches the behavior of Link.hash.
+ hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment}
+ return _CleanResult(
+ parsed=parsed._replace(netloc=netloc, query="", fragment=""),
+ query=urllib.parse.parse_qs(parsed.query),
+ subdirectory=subdirectory,
+ hashes=hashes,
+ )
+
+
+@functools.lru_cache(maxsize=None)
+def links_equivalent(link1: Link, link2: Link) -> bool:
+ return _clean_link(link1) == _clean_link(link2)
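
Two behaviors worth seeing side by side: query ordering and the `egg=` fragment are ignored by the equivalency check, while hash fragments are parsed into `Link`'s hash accessors. The URLs below are made up for the demo.

```python
from pip._internal.models.link import Link, links_equivalent

a = Link("https://example.com/pkg.tar.gz?b=2&a=1#egg=pkg")
b = Link("https://example.com/pkg.tar.gz?a=1&b=2")
assert links_equivalent(a, b)  # query order and egg= do not matter

h = Link("https://example.com/pkg-1.0-py3-none-any.whl#sha256=deadbeef")
assert h.is_wheel
assert h.hash_name == "sha256" and h.hash == "deadbeef"
```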
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/models/target_python.py b/vllm/lib/python3.10/site-packages/pip/_internal/models/target_python.py
new file mode 100644
index 0000000000000000000000000000000000000000..88925a9fd01a440e6de970bc234c3503b7f09cc1
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/models/target_python.py
@@ -0,0 +1,121 @@
+import sys
+from typing import List, Optional, Set, Tuple
+
+from pip._vendor.packaging.tags import Tag
+
+from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot
+from pip._internal.utils.misc import normalize_version_info
+
+
+class TargetPython:
+ """
+ Encapsulates the properties of a Python interpreter one is targeting
+ for a package install, download, etc.
+ """
+
+ __slots__ = [
+ "_given_py_version_info",
+ "abis",
+ "implementation",
+ "platforms",
+ "py_version",
+ "py_version_info",
+ "_valid_tags",
+ "_valid_tags_set",
+ ]
+
+ def __init__(
+ self,
+ platforms: Optional[List[str]] = None,
+ py_version_info: Optional[Tuple[int, ...]] = None,
+ abis: Optional[List[str]] = None,
+ implementation: Optional[str] = None,
+ ) -> None:
+ """
+ :param platforms: A list of strings or None. If None, searches for
+ packages that are supported by the current system. Otherwise, will
+ find packages that can be built on the platforms passed in. These
+ packages will only be downloaded for distribution: they will
+ not be built locally.
+ :param py_version_info: An optional tuple of ints representing the
+ Python version information to use (e.g. `sys.version_info[:3]`).
+ This can have length 1, 2, or 3 when provided.
+ :param abis: A list of strings or None. This is passed to
+ compatibility_tags.py's get_supported() function as is.
+ :param implementation: A string or None. This is passed to
+ compatibility_tags.py's get_supported() function as is.
+ """
+ # Store the given py_version_info for when we call get_supported().
+ self._given_py_version_info = py_version_info
+
+ if py_version_info is None:
+ py_version_info = sys.version_info[:3]
+ else:
+ py_version_info = normalize_version_info(py_version_info)
+
+ py_version = ".".join(map(str, py_version_info[:2]))
+
+ self.abis = abis
+ self.implementation = implementation
+ self.platforms = platforms
+ self.py_version = py_version
+ self.py_version_info = py_version_info
+
+ # This is used to cache the return value of get_(un)sorted_tags.
+ self._valid_tags: Optional[List[Tag]] = None
+ self._valid_tags_set: Optional[Set[Tag]] = None
+
+ def format_given(self) -> str:
+ """
+ Format the given, non-None attributes for display.
+ """
+ display_version = None
+ if self._given_py_version_info is not None:
+ display_version = ".".join(
+ str(part) for part in self._given_py_version_info
+ )
+
+ key_values = [
+ ("platforms", self.platforms),
+ ("version_info", display_version),
+ ("abis", self.abis),
+ ("implementation", self.implementation),
+ ]
+ return " ".join(
+ f"{key}={value!r}" for key, value in key_values if value is not None
+ )
+
+ def get_sorted_tags(self) -> List[Tag]:
+ """
+ Return the supported PEP 425 tags to check wheel candidates against.
+
+ The tags are returned in order of preference (most preferred first).
+ """
+ if self._valid_tags is None:
+ # Pass version=None if no py_version_info was given since
+ # version=None uses special default logic.
+ py_version_info = self._given_py_version_info
+ if py_version_info is None:
+ version = None
+ else:
+ version = version_info_to_nodot(py_version_info)
+
+ tags = get_supported(
+ version=version,
+ platforms=self.platforms,
+ abis=self.abis,
+ impl=self.implementation,
+ )
+ self._valid_tags = tags
+
+ return self._valid_tags
+
+ def get_unsorted_tags(self) -> Set[Tag]:
+ """Exactly the same as get_sorted_tags, but returns a set.
+
+ This is important for performance.
+ """
+ if self._valid_tags_set is None:
+ self._valid_tags_set = set(self.get_sorted_tags())
+
+ return self._valid_tags_set
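
A usage sketch targeting an interpreter other than the running one; all values are illustrative.

```python
from pip._internal.models.target_python import TargetPython

target = TargetPython(
    platforms=["manylinux2014_x86_64"],
    py_version_info=(3, 10),
    implementation="cp",
)
print(target.format_given())
tags = target.get_sorted_tags()  # PEP 425 tags, most preferred first
print(str(tags[0]))              # e.g. cp310-cp310-manylinux2014_x86_64
```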
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/__init__.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3f22d6cb49e928610659690f4c89f925f448c090
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..54d827f305b7a06c84c23690515ea1a42b671afd
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..29f88535ba475d3514fd30deb63df462f4f96c11
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__init__.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e81ea441207df51c0f4332fcd4cc7cc9fd4a6b5f
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cc89d078349e749d6cd877fed3c0742009ef63bb
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0669fb09e53da18e64d68d97f52de636240144c1
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d6ee189cced9181acecb5f3e48d6bee988694f0d
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9a4f17db4112b5eff1eaa2f0f9c4e602c8046815
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3b6228e933adb6a3894e9ab30ff50ca2b0565f20
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f776b17f706f0dac16571957c67beae37cba7782
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..262af9f76bf513f600a9fa1d67bd7d88fdea67bc
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/build_tracker.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/build_tracker.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ed8dd235960bf406e26d5c0069b12c7f2d463e9
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/build_tracker.py
@@ -0,0 +1,138 @@
+import contextlib
+import hashlib
+import logging
+import os
+from types import TracebackType
+from typing import Dict, Generator, Optional, Type, Union
+
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+@contextlib.contextmanager
+def update_env_context_manager(**changes: str) -> Generator[None, None, None]:
+ target = os.environ
+
+ # Save values from the target and change them.
+ non_existent_marker = object()
+ saved_values: Dict[str, Union[object, str]] = {}
+ for name, new_value in changes.items():
+ try:
+ saved_values[name] = target[name]
+ except KeyError:
+ saved_values[name] = non_existent_marker
+ target[name] = new_value
+
+ try:
+ yield
+ finally:
+ # Restore original values in the target.
+ for name, original_value in saved_values.items():
+ if original_value is non_existent_marker:
+ del target[name]
+ else:
+ assert isinstance(original_value, str) # for mypy
+ target[name] = original_value
+
+
+@contextlib.contextmanager
+def get_build_tracker() -> Generator["BuildTracker", None, None]:
+ root = os.environ.get("PIP_BUILD_TRACKER")
+ with contextlib.ExitStack() as ctx:
+ if root is None:
+ root = ctx.enter_context(TempDirectory(kind="build-tracker")).path
+ ctx.enter_context(update_env_context_manager(PIP_BUILD_TRACKER=root))
+ logger.debug("Initialized build tracking at %s", root)
+
+ with BuildTracker(root) as tracker:
+ yield tracker
+
+
+class TrackerId(str):
+ """Uniquely identifying string provided to the build tracker."""
+
+
+class BuildTracker:
+ """Ensure that an sdist cannot request itself as a setup requirement.
+
+ When an sdist is prepared, it identifies its setup requirements in the
+ context of ``BuildTracker.track()``. If a requirement shows up recursively, this
+ raises an exception.
+
+ This stops fork bombs embedded in malicious packages."""
+
+ def __init__(self, root: str) -> None:
+ self._root = root
+ self._entries: Dict[TrackerId, InstallRequirement] = {}
+ logger.debug("Created build tracker: %s", self._root)
+
+ def __enter__(self) -> "BuildTracker":
+ logger.debug("Entered build tracker: %s", self._root)
+ return self
+
+ def __exit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
+ ) -> None:
+ self.cleanup()
+
+ def _entry_path(self, key: TrackerId) -> str:
+ hashed = hashlib.sha224(key.encode()).hexdigest()
+ return os.path.join(self._root, hashed)
+
+ def add(self, req: InstallRequirement, key: TrackerId) -> None:
+ """Add an InstallRequirement to build tracking."""
+
+ # Get the file to write information about this requirement.
+ entry_path = self._entry_path(key)
+
+ # Try reading from the file. If it exists and can be read from, a build
+ # is already in progress, so a LookupError is raised.
+ try:
+ with open(entry_path) as fp:
+ contents = fp.read()
+ except FileNotFoundError:
+ pass
+ else:
+ message = f"{req.link} is already being built: {contents}"
+ raise LookupError(message)
+
+ # If we're here, req should really not be building already.
+ assert key not in self._entries
+
+ # Start tracking this requirement.
+ with open(entry_path, "w", encoding="utf-8") as fp:
+ fp.write(str(req))
+ self._entries[key] = req
+
+ logger.debug("Added %s to build tracker %r", req, self._root)
+
+ def remove(self, req: InstallRequirement, key: TrackerId) -> None:
+ """Remove an InstallRequirement from build tracking."""
+
+ # Delete the created file and the corresponding entry.
+ os.unlink(self._entry_path(key))
+ del self._entries[key]
+
+ logger.debug("Removed %s from build tracker %r", req, self._root)
+
+ def cleanup(self) -> None:
+ for key, req in list(self._entries.items()):
+ self.remove(req, key)
+
+ logger.debug("Removed build tracker: %r", self._root)
+
+ @contextlib.contextmanager
+ def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]:
+ """Ensure that `key` cannot install itself as a setup requirement.
+
+ :raises LookupError: If `key` was already provided in a parent invocation of
+ the context introduced by this method."""
+ tracker_id = TrackerId(key)
+ self.add(req, tracker_id)
+ yield
+ self.remove(req, tracker_id)
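+
+# Usage sketch (illustrative, not part of pip; `key` here is assumed to be any
+# string that uniquely identifies the build, e.g. the requirement's link URL):
+#
+#     with tracker.track(req, key=str(req.link)):
+#         ...  # build the sdist; a recursive attempt to build the same key
+#              # raises LookupError instead of recursing forever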
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py
new file mode 100644
index 0000000000000000000000000000000000000000..c66ac354deb035405fe0e4040dac539d28570257
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py
@@ -0,0 +1,39 @@
+"""Metadata generation logic for source distributions.
+"""
+
+import os
+
+from pip._vendor.pyproject_hooks import BuildBackendHookCaller
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.exceptions import (
+ InstallationSubprocessError,
+ MetadataGenerationFailed,
+)
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.temp_dir import TempDirectory
+
+
+def generate_metadata(
+ build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
+) -> str:
+ """Generate metadata using mechanisms described in PEP 517.
+
+ Returns the generated metadata directory.
+ """
+ metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
+
+ metadata_dir = metadata_tmpdir.path
+
+ with build_env:
+ # Note that BuildBackendHookCaller implements a fallback for
+ # prepare_metadata_for_build_wheel, so we don't have to
+ # consider the possibility that this hook doesn't exist.
+ runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")
+ with backend.subprocess_runner(runner):
+ try:
+ distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
+ except InstallationSubprocessError as error:
+ raise MetadataGenerationFailed(package_details=details) from error
+
+ return os.path.join(metadata_dir, distinfo_dir)
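+
+# Usage sketch (illustrative, not part of pip): the return value is the
+# generated .dist-info directory inside a globally managed temp dir.
+#
+#     distinfo_path = generate_metadata(build_env, backend, details="pkg from sdist")
+#     # e.g. ".../modern-metadata-abc123/pkg-1.0.dist-info" (path illustrative)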
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py
new file mode 100644
index 0000000000000000000000000000000000000000..3397ccf0f92d5903281866a693979fdcfa7d3d61
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py
@@ -0,0 +1,42 @@
+"""Metadata generation logic for source distributions.
+"""
+
+import os
+
+from pip._vendor.pyproject_hooks import BuildBackendHookCaller
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.exceptions import (
+ InstallationSubprocessError,
+ MetadataGenerationFailed,
+)
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.temp_dir import TempDirectory
+
+
+def generate_editable_metadata(
+ build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
+) -> str:
+ """Generate metadata using mechanisms described in PEP 660.
+
+ Returns the generated metadata directory.
+ """
+ metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
+
+ metadata_dir = metadata_tmpdir.path
+
+ with build_env:
+ # Note that BuildBackendHookCaller implements a fallback for
+ # prepare_metadata_for_build_wheel/editable, so we don't have to
+ # consider the possibility that this hook doesn't exist.
+ runner = runner_with_spinner_message(
+ "Preparing editable metadata (pyproject.toml)"
+ )
+ with backend.subprocess_runner(runner):
+ try:
+ distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir)
+ except InstallationSubprocessError as error:
+ raise MetadataGenerationFailed(package_details=details) from error
+
+ assert distinfo_dir is not None
+ return os.path.join(metadata_dir, distinfo_dir)
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py
new file mode 100644
index 0000000000000000000000000000000000000000..c01dd1c678a083c17190fc9b7e214eca5c91f4f1
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py
@@ -0,0 +1,74 @@
+"""Metadata generation logic for legacy source distributions.
+"""
+
+import logging
+import os
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.cli.spinners import open_spinner
+from pip._internal.exceptions import (
+ InstallationError,
+ InstallationSubprocessError,
+ MetadataGenerationFailed,
+)
+from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+def _find_egg_info(directory: str) -> str:
+ """Find an .egg-info subdirectory in `directory`."""
+ filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")]
+
+ if not filenames:
+ raise InstallationError(f"No .egg-info directory found in {directory}")
+
+ if len(filenames) > 1:
+ raise InstallationError(
+ f"More than one .egg-info directory found in {directory}"
+ )
+
+ return os.path.join(directory, filenames[0])
+
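+# Illustrative behavior (not real paths):
+#
+#     _find_egg_info("/tmp/pip-egg-info-xyz")
+#     # -> "/tmp/pip-egg-info-xyz/pkg.egg-info"
+#     # raises InstallationError when zero or multiple *.egg-info dirs exist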
+
+def generate_metadata(
+ build_env: BuildEnvironment,
+ setup_py_path: str,
+ source_dir: str,
+ isolated: bool,
+ details: str,
+) -> str:
+ """Generate metadata using setup.py-based defacto mechanisms.
+
+ Returns the generated metadata directory.
+ """
+ logger.debug(
+ "Running setup.py (path:%s) egg_info for package %s",
+ setup_py_path,
+ details,
+ )
+
+ egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path
+
+ args = make_setuptools_egg_info_args(
+ setup_py_path,
+ egg_info_dir=egg_info_dir,
+ no_user_config=isolated,
+ )
+
+ with build_env:
+ with open_spinner("Preparing metadata (setup.py)") as spinner:
+ try:
+ call_subprocess(
+ args,
+ cwd=source_dir,
+ command_desc="python setup.py egg_info",
+ spinner=spinner,
+ )
+ except InstallationSubprocessError as error:
+ raise MetadataGenerationFailed(package_details=details) from error
+
+ # Return the .egg-info directory.
+ return _find_egg_info(egg_info_dir)
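+
+# Usage sketch (illustrative, not part of pip): this is roughly equivalent to
+# running `python setup.py egg_info --egg-base <egg_info_dir>` in `source_dir`
+# within the build environment, then returning the resulting *.egg-info dir.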
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..064811ad11bb07b2b7bc8e30ec6c03f21997d6b2
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel.py
@@ -0,0 +1,37 @@
+import logging
+import os
+from typing import Optional
+
+from pip._vendor.pyproject_hooks import BuildBackendHookCaller
+
+from pip._internal.utils.subprocess import runner_with_spinner_message
+
+logger = logging.getLogger(__name__)
+
+
+def build_wheel_pep517(
+ name: str,
+ backend: BuildBackendHookCaller,
+ metadata_directory: str,
+ tempd: str,
+) -> Optional[str]:
+ """Build one InstallRequirement using the PEP 517 build process.
+
+ Returns path to wheel if successfully built. Otherwise, returns None.
+ """
+ assert metadata_directory is not None
+ try:
+ logger.debug("Destination directory: %s", tempd)
+
+ runner = runner_with_spinner_message(
+ f"Building wheel for {name} (pyproject.toml)"
+ )
+ with backend.subprocess_runner(runner):
+ wheel_name = backend.build_wheel(
+ tempd,
+ metadata_directory=metadata_directory,
+ )
+ except Exception:
+ logger.error("Failed building wheel for %s", name)
+ return None
+ return os.path.join(tempd, wheel_name)
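+
+# Usage sketch (illustrative, not part of pip):
+#
+#     wheel_path = build_wheel_pep517("pkg", backend, metadata_dir, tempd)
+#     if wheel_path is None:
+#         ...  # the failure was already logged above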
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_editable.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_editable.py
new file mode 100644
index 0000000000000000000000000000000000000000..719d69dd801b78b360c6c2234080eee638b8de82
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_editable.py
@@ -0,0 +1,46 @@
+import logging
+import os
+from typing import Optional
+
+from pip._vendor.pyproject_hooks import BuildBackendHookCaller, HookMissing
+
+from pip._internal.utils.subprocess import runner_with_spinner_message
+
+logger = logging.getLogger(__name__)
+
+
+def build_wheel_editable(
+ name: str,
+ backend: BuildBackendHookCaller,
+ metadata_directory: str,
+ tempd: str,
+) -> Optional[str]:
+ """Build one InstallRequirement using the PEP 660 build process.
+
+ Returns path to wheel if successfully built. Otherwise, returns None.
+ """
+ assert metadata_directory is not None
+ try:
+ logger.debug("Destination directory: %s", tempd)
+
+ runner = runner_with_spinner_message(
+ f"Building editable for {name} (pyproject.toml)"
+ )
+ with backend.subprocess_runner(runner):
+ try:
+ wheel_name = backend.build_editable(
+ tempd,
+ metadata_directory=metadata_directory,
+ )
+ except HookMissing as e:
+ logger.error(
+ "Cannot build editable %s because the build "
+ "backend does not have the %s hook",
+ name,
+ e,
+ )
+ return None
+ except Exception:
+ logger.error("Failed building editable for %s", name)
+ return None
+ return os.path.join(tempd, wheel_name)
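+
+# Usage sketch (illustrative, not part of pip):
+#
+#     wheel_path = build_wheel_editable("pkg", backend, metadata_dir, tempd)
+#     # None either when the backend lacks the optional PEP 660 hook
+#     # (HookMissing) or when the build itself failed; both are logged above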
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py
new file mode 100644
index 0000000000000000000000000000000000000000..3ee2a7058d323e41f3c930f14685f68e6a599fa5
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py
@@ -0,0 +1,102 @@
+import logging
+import os.path
+from typing import List, Optional
+
+from pip._internal.cli.spinners import open_spinner
+from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
+from pip._internal.utils.subprocess import call_subprocess, format_command_args
+
+logger = logging.getLogger(__name__)
+
+
+def format_command_result(
+ command_args: List[str],
+ command_output: str,
+) -> str:
+ """Format command information for logging."""
+ command_desc = format_command_args(command_args)
+ text = f"Command arguments: {command_desc}\n"
+
+ if not command_output:
+ text += "Command output: None"
+ elif logger.getEffectiveLevel() > logging.DEBUG:
+ text += "Command output: [use --verbose to show]"
+ else:
+ if not command_output.endswith("\n"):
+ command_output += "\n"
+ text += f"Command output:\n{command_output}"
+
+ return text
+
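+# Illustrative output at a non-verbose log level (not real output):
+#
+#     Command arguments: python setup.py bdist_wheel
+#     Command output: [use --verbose to show]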
+
+def get_legacy_build_wheel_path(
+ names: List[str],
+ temp_dir: str,
+ name: str,
+ command_args: List[str],
+ command_output: str,
+) -> Optional[str]:
+ """Return the path to the wheel in the temporary build directory."""
+ # Sort for determinism.
+ names = sorted(names)
+ if not names:
+ msg = f"Legacy build of wheel for {name!r} created no files.\n"
+ msg += format_command_result(command_args, command_output)
+ logger.warning(msg)
+ return None
+
+ if len(names) > 1:
+ msg = (
+ f"Legacy build of wheel for {name!r} created more than one file.\n"
+ f"Filenames (choosing first): {names}\n"
+ )
+ msg += format_command_result(command_args, command_output)
+ logger.warning(msg)
+
+ return os.path.join(temp_dir, names[0])
+
+
+def build_wheel_legacy(
+ name: str,
+ setup_py_path: str,
+ source_dir: str,
+ global_options: List[str],
+ build_options: List[str],
+ tempd: str,
+) -> Optional[str]:
+ """Build one unpacked package using the "legacy" build process.
+
+ Returns path to wheel if successfully built. Otherwise, returns None.
+ """
+ wheel_args = make_setuptools_bdist_wheel_args(
+ setup_py_path,
+ global_options=global_options,
+ build_options=build_options,
+ destination_dir=tempd,
+ )
+
+ spin_message = f"Building wheel for {name} (setup.py)"
+ with open_spinner(spin_message) as spinner:
+ logger.debug("Destination directory: %s", tempd)
+
+ try:
+ output = call_subprocess(
+ wheel_args,
+ command_desc="python setup.py bdist_wheel",
+ cwd=source_dir,
+ spinner=spinner,
+ )
+ except Exception:
+ spinner.finish("error")
+ logger.error("Failed building wheel for %s", name)
+ return None
+
+ names = os.listdir(tempd)
+ wheel_path = get_legacy_build_wheel_path(
+ names=names,
+ temp_dir=tempd,
+ name=name,
+ command_args=wheel_args,
+ command_output=output,
+ )
+ return wheel_path
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/check.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/check.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b6fbc4c37599588ad69da3dc8a4d9628dc89b96
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/operations/check.py
@@ -0,0 +1,181 @@
+"""Validation of dependencies of packages
+"""
+
+import logging
+from contextlib import suppress
+from email.parser import Parser
+from functools import reduce
+from typing import (
+ Callable,
+ Dict,
+ FrozenSet,
+ Generator,
+ Iterable,
+ List,
+ NamedTuple,
+ Optional,
+ Set,
+ Tuple,
+)
+
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.packaging.tags import Tag, parse_tag
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import Version
+
+from pip._internal.distributions import make_distribution_for_install_requirement
+from pip._internal.metadata import get_default_environment
+from pip._internal.metadata.base import BaseDistribution
+from pip._internal.req.req_install import InstallRequirement
+
+logger = logging.getLogger(__name__)
+
+
+class PackageDetails(NamedTuple):
+ version: Version
+ dependencies: List[Requirement]
+
+
+# Shorthands
+PackageSet = Dict[NormalizedName, PackageDetails]
+Missing = Tuple[NormalizedName, Requirement]
+Conflicting = Tuple[NormalizedName, Version, Requirement]
+
+MissingDict = Dict[NormalizedName, List[Missing]]
+ConflictingDict = Dict[NormalizedName, List[Conflicting]]
+CheckResult = Tuple[MissingDict, ConflictingDict]
+ConflictDetails = Tuple[PackageSet, CheckResult]
+
+
+def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
+ """Converts a list of distributions into a PackageSet."""
+ package_set = {}
+ problems = False
+ env = get_default_environment()
+ for dist in env.iter_installed_distributions(local_only=False, skip=()):
+ name = dist.canonical_name
+ try:
+ dependencies = list(dist.iter_dependencies())
+ package_set[name] = PackageDetails(dist.version, dependencies)
+ except (OSError, ValueError) as e:
+ # Don't crash on unreadable or broken metadata.
+ logger.warning("Error parsing dependencies of %s: %s", name, e)
+ problems = True
+ return package_set, problems
+
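+# Illustrative result shape (not real data):
+#
+#     package_set, problems = create_package_set_from_installed()
+#     # package_set["requests"] == PackageDetails(
+#     #     Version("2.31.0"), [Requirement("urllib3<3,>=1.21.1"), ...]
+#     # )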
+
+def check_package_set(
+ package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
+) -> CheckResult:
+ """Check if a package set is consistent
+
+ If should_ignore is passed, it should be a callable that takes a
+ package name and returns a boolean.
+ """
+
+ missing = {}
+ conflicting = {}
+
+ for package_name, package_detail in package_set.items():
+ # Info about dependencies of package_name
+ missing_deps: Set[Missing] = set()
+ conflicting_deps: Set[Conflicting] = set()
+
+ if should_ignore and should_ignore(package_name):
+ continue
+
+ for req in package_detail.dependencies:
+ name = canonicalize_name(req.name)
+
+ # Check if it's missing
+ if name not in package_set:
+ missed = True
+ if req.marker is not None:
+ missed = req.marker.evaluate({"extra": ""})
+ if missed:
+ missing_deps.add((name, req))
+ continue
+
+ # Check if there's a conflict
+ version = package_set[name].version
+ if not req.specifier.contains(version, prereleases=True):
+ conflicting_deps.add((name, version, req))
+
+ if missing_deps:
+ missing[package_name] = sorted(missing_deps, key=str)
+ if conflicting_deps:
+ conflicting[package_name] = sorted(conflicting_deps, key=str)
+
+ return missing, conflicting
+
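+# Illustrative result shape (not real data): both dicts are keyed by the
+# package whose dependencies are problematic.
+#
+#     missing, conflicting = check_package_set(package_set)
+#     # missing["pkg"]     == [("dep", Requirement("dep>=1"))]
+#     # conflicting["pkg"] == [("dep", Version("0.9"), Requirement("dep>=1"))]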
+
+def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
+ """For checking if the dependency graph would be consistent after \
+ installing given requirements
+ """
+ # Start from the current state
+ package_set, _ = create_package_set_from_installed()
+ # Install packages
+ would_be_installed = _simulate_installation_of(to_install, package_set)
+
+ # Only warn about directly-dependent packages; create a whitelist of them
+ whitelist = _create_whitelist(would_be_installed, package_set)
+
+ return (
+ package_set,
+ check_package_set(
+ package_set, should_ignore=lambda name: name not in whitelist
+ ),
+ )
+
+
+def check_unsupported(
+ packages: Iterable[BaseDistribution],
+ supported_tags: Iterable[Tag],
+) -> Generator[BaseDistribution, None, None]:
+ for p in packages:
+ with suppress(FileNotFoundError):
+ wheel_file = p.read_text("WHEEL")
+ wheel_tags: FrozenSet[Tag] = reduce(
+ frozenset.union,
+ map(parse_tag, Parser().parsestr(wheel_file).get_all("Tag", [])),
+ frozenset(),
+ )
+ if wheel_tags.isdisjoint(supported_tags):
+ yield p
+
+
+def _simulate_installation_of(
+ to_install: List[InstallRequirement], package_set: PackageSet
+) -> Set[NormalizedName]:
+ """Computes the version of packages after installing to_install."""
+ # Keep track of packages that were installed
+ installed = set()
+
+ # Modify it as installing requirement_set would (assuming no errors)
+ for inst_req in to_install:
+ abstract_dist = make_distribution_for_install_requirement(inst_req)
+ dist = abstract_dist.get_metadata_distribution()
+ name = dist.canonical_name
+ package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies()))
+
+ installed.add(name)
+
+ return installed
+
+
+def _create_whitelist(
+ would_be_installed: Set[NormalizedName], package_set: PackageSet
+) -> Set[NormalizedName]:
+ packages_affected = set(would_be_installed)
+
+ for package_name in package_set:
+ if package_name in packages_affected:
+ continue
+
+ for req in package_set[package_name].dependencies:
+ if canonicalize_name(req.name) in packages_affected:
+ packages_affected.add(package_name)
+ break
+
+ return packages_affected
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/freeze.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/freeze.py
new file mode 100644
index 0000000000000000000000000000000000000000..ae5dd37f9db6e50dd48de55660bcb90e80b0ea64
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/operations/freeze.py
@@ -0,0 +1,256 @@
+import collections
+import logging
+import os
+from dataclasses import dataclass, field
+from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set
+
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import InvalidVersion
+
+from pip._internal.exceptions import BadCommand, InstallationError
+from pip._internal.metadata import BaseDistribution, get_environment
+from pip._internal.req.constructors import (
+ install_req_from_editable,
+ install_req_from_line,
+)
+from pip._internal.req.req_file import COMMENT_RE
+from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference
+
+logger = logging.getLogger(__name__)
+
+
+class _EditableInfo(NamedTuple):
+ requirement: str
+ comments: List[str]
+
+
+def freeze(
+ requirement: Optional[List[str]] = None,
+ local_only: bool = False,
+ user_only: bool = False,
+ paths: Optional[List[str]] = None,
+ isolated: bool = False,
+ exclude_editable: bool = False,
+ skip: Container[str] = (),
+) -> Generator[str, None, None]:
+ installations: Dict[str, FrozenRequirement] = {}
+
+ dists = get_environment(paths).iter_installed_distributions(
+ local_only=local_only,
+ skip=(),
+ user_only=user_only,
+ )
+ for dist in dists:
+ req = FrozenRequirement.from_dist(dist)
+ if exclude_editable and req.editable:
+ continue
+ installations[req.canonical_name] = req
+
+ if requirement:
+        # the options that don't get turned into an InstallRequirement
+        # should only be emitted once, even if the same option appears in
+        # multiple requirements files, so keep track of what has already
+        # been emitted
+ emitted_options: Set[str] = set()
+ # keep track of which files a requirement is in so that we can
+ # give an accurate warning if a requirement appears multiple times.
+ req_files: Dict[str, List[str]] = collections.defaultdict(list)
+ for req_file_path in requirement:
+ with open(req_file_path) as req_file:
+ for line in req_file:
+ if (
+ not line.strip()
+ or line.strip().startswith("#")
+ or line.startswith(
+ (
+ "-r",
+ "--requirement",
+ "-f",
+ "--find-links",
+ "-i",
+ "--index-url",
+ "--pre",
+ "--trusted-host",
+ "--process-dependency-links",
+ "--extra-index-url",
+ "--use-feature",
+ )
+ )
+ ):
+ line = line.rstrip()
+ if line not in emitted_options:
+ emitted_options.add(line)
+ yield line
+ continue
+
+ if line.startswith("-e") or line.startswith("--editable"):
+ if line.startswith("-e"):
+ line = line[2:].strip()
+ else:
+ line = line[len("--editable") :].strip().lstrip("=")
+ line_req = install_req_from_editable(
+ line,
+ isolated=isolated,
+ )
+ else:
+ line_req = install_req_from_line(
+ COMMENT_RE.sub("", line).strip(),
+ isolated=isolated,
+ )
+
+ if not line_req.name:
+ logger.info(
+ "Skipping line in requirement file [%s] because "
+ "it's not clear what it would install: %s",
+ req_file_path,
+ line.strip(),
+ )
+ logger.info(
+ " (add #egg=PackageName to the URL to avoid"
+ " this warning)"
+ )
+ else:
+ line_req_canonical_name = canonicalize_name(line_req.name)
+ if line_req_canonical_name not in installations:
+ # either it's not installed, or it is installed
+ # but has been processed already
+ if not req_files[line_req.name]:
+ logger.warning(
+ "Requirement file [%s] contains %s, but "
+ "package %r is not installed",
+ req_file_path,
+ COMMENT_RE.sub("", line).strip(),
+ line_req.name,
+ )
+ else:
+ req_files[line_req.name].append(req_file_path)
+ else:
+ yield str(installations[line_req_canonical_name]).rstrip()
+ del installations[line_req_canonical_name]
+ req_files[line_req.name].append(req_file_path)
+
+ # Warn about requirements that were included multiple times (in a
+ # single requirements file or in different requirements files).
+ for name, files in req_files.items():
+ if len(files) > 1:
+ logger.warning(
+ "Requirement %s included multiple times [%s]",
+ name,
+ ", ".join(sorted(set(files))),
+ )
+
+ yield ("## The following requirements were added by pip freeze:")
+ for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
+ if installation.canonical_name not in skip:
+ yield str(installation).rstrip()
+
+
+def _format_as_name_version(dist: BaseDistribution) -> str:
+ try:
+ dist_version = dist.version
+ except InvalidVersion:
+ # legacy version
+ return f"{dist.raw_name}==={dist.raw_version}"
+ else:
+ return f"{dist.raw_name}=={dist_version}"
+
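+# Illustrative outputs (not real data):
+#
+#     _format_as_name_version(dist)  # -> "pkg==1.2.3"
+#     # or, when the installed version fails PEP 440 parsing:
+#     #                                -> "pkg===1.0-legacy"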
+
+def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
+ """
+ Compute and return values (req, comments) for use in
+ FrozenRequirement.from_dist().
+ """
+ editable_project_location = dist.editable_project_location
+ assert editable_project_location
+ location = os.path.normcase(os.path.abspath(editable_project_location))
+
+ from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs
+
+ vcs_backend = vcs.get_backend_for_dir(location)
+
+ if vcs_backend is None:
+ display = _format_as_name_version(dist)
+ logger.debug(
+ 'No VCS found for editable requirement "%s" in: %r',
+ display,
+ location,
+ )
+ return _EditableInfo(
+ requirement=location,
+ comments=[f"# Editable install with no version control ({display})"],
+ )
+
+ vcs_name = type(vcs_backend).__name__
+
+ try:
+ req = vcs_backend.get_src_requirement(location, dist.raw_name)
+ except RemoteNotFoundError:
+ display = _format_as_name_version(dist)
+ return _EditableInfo(
+ requirement=location,
+ comments=[f"# Editable {vcs_name} install with no remote ({display})"],
+ )
+ except RemoteNotValidError as ex:
+ display = _format_as_name_version(dist)
+ return _EditableInfo(
+ requirement=location,
+ comments=[
+ f"# Editable {vcs_name} install ({display}) with either a deleted "
+ f"local remote or invalid URI:",
+ f"# '{ex.url}'",
+ ],
+ )
+ except BadCommand:
+ logger.warning(
+ "cannot determine version of editable source in %s "
+ "(%s command not found in path)",
+ location,
+ vcs_backend.name,
+ )
+ return _EditableInfo(requirement=location, comments=[])
+ except InstallationError as exc:
+ logger.warning("Error when trying to get requirement for VCS system %s", exc)
+ else:
+ return _EditableInfo(requirement=req, comments=[])
+
+ logger.warning("Could not determine repository location of %s", location)
+
+ return _EditableInfo(
+ requirement=location,
+ comments=["## !! Could not determine repository location"],
+ )
+
+
+@dataclass(frozen=True)
+class FrozenRequirement:
+ name: str
+ req: str
+ editable: bool
+ comments: Iterable[str] = field(default_factory=tuple)
+
+ @property
+ def canonical_name(self) -> NormalizedName:
+ return canonicalize_name(self.name)
+
+ @classmethod
+ def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
+ editable = dist.editable
+ if editable:
+ req, comments = _get_editable_info(dist)
+ else:
+ comments = []
+ direct_url = dist.direct_url
+ if direct_url:
+ # if PEP 610 metadata is present, use it
+ req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name)
+ else:
+ # name==version requirement
+ req = _format_as_name_version(dist)
+
+ return cls(dist.raw_name, req, editable, comments=comments)
+
+ def __str__(self) -> str:
+ req = self.req
+ if self.editable:
+ req = f"-e {req}"
+ return "\n".join(list(self.comments) + [str(req)]) + "\n"
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..24d6a5dd31fe33b03f90ed0f9ee465253686900c
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py
@@ -0,0 +1,2 @@
+"""For modules related to installing packages.
+"""
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d3bbce33f1f10986c6cde8bc19be68d862aac6e0
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6fb57683a2a6c7c38381b08591395a07e4d95f4b
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ff00944dedba68035a331a672076a92f8dc4812e
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/editable_legacy.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/editable_legacy.py
new file mode 100644
index 0000000000000000000000000000000000000000..9aaa699a645e03d5971dbb5e43ff73d2f412a550
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/editable_legacy.py
@@ -0,0 +1,47 @@
+"""Legacy editable installation process, i.e. `setup.py develop`.
+"""
+
+import logging
+from typing import Optional, Sequence
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.setuptools_build import make_setuptools_develop_args
+from pip._internal.utils.subprocess import call_subprocess
+
+logger = logging.getLogger(__name__)
+
+
+def install_editable(
+ *,
+ global_options: Sequence[str],
+ prefix: Optional[str],
+ home: Optional[str],
+ use_user_site: bool,
+ name: str,
+ setup_py_path: str,
+ isolated: bool,
+ build_env: BuildEnvironment,
+ unpacked_source_directory: str,
+) -> None:
+ """Install a package in editable mode. Most arguments are pass-through
+ to setuptools.
+ """
+ logger.info("Running setup.py develop for %s", name)
+
+ args = make_setuptools_develop_args(
+ setup_py_path,
+ global_options=global_options,
+ no_user_config=isolated,
+ prefix=prefix,
+ home=home,
+ use_user_site=use_user_site,
+ )
+
+ with indent_log():
+ with build_env:
+ call_subprocess(
+ args,
+ command_desc="python setup.py develop",
+ cwd=unpacked_source_directory,
+ )
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/wheel.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/wheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..aef42aa9eefae7cae4a3d877b6dab1c53d85ad5f
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/operations/install/wheel.py
@@ -0,0 +1,741 @@
+"""Support for installing and building the "wheel" binary package format.
+"""
+
+import collections
+import compileall
+import contextlib
+import csv
+import importlib
+import logging
+import os.path
+import re
+import shutil
+import sys
+import warnings
+from base64 import urlsafe_b64encode
+from email.message import Message
+from itertools import chain, filterfalse, starmap
+from typing import (
+ IO,
+ TYPE_CHECKING,
+ Any,
+ BinaryIO,
+ Callable,
+ Dict,
+ Generator,
+ Iterable,
+ Iterator,
+ List,
+ NewType,
+ Optional,
+ Protocol,
+ Sequence,
+ Set,
+ Tuple,
+ Union,
+ cast,
+)
+from zipfile import ZipFile, ZipInfo
+
+from pip._vendor.distlib.scripts import ScriptMaker
+from pip._vendor.distlib.util import get_export_entry
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import InstallationError
+from pip._internal.locations import get_major_minor_version
+from pip._internal.metadata import (
+ BaseDistribution,
+ FilesystemWheel,
+ get_wheel_distribution,
+)
+from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
+from pip._internal.models.scheme import SCHEME_KEYS, Scheme
+from pip._internal.utils.filesystem import adjacent_tmp_file, replace
+from pip._internal.utils.misc import StreamWrapper, ensure_dir, hash_file, partition
+from pip._internal.utils.unpacking import (
+ current_umask,
+ is_within_directory,
+ set_extracted_file_to_default_mode_plus_executable,
+ zip_item_is_executable,
+)
+from pip._internal.utils.wheel import parse_wheel
+
+if TYPE_CHECKING:
+
+ class File(Protocol):
+ src_record_path: "RecordPath"
+ dest_path: str
+ changed: bool
+
+ def save(self) -> None:
+ pass
+
+
+logger = logging.getLogger(__name__)
+
+RecordPath = NewType("RecordPath", str)
+InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
+
+
+def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]:
+ """Return (encoded_digest, length) for path using hashlib.sha256()"""
+ h, length = hash_file(path, blocksize)
+ digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=")
+ return (digest, str(length))
+
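+# Illustrative result for an empty file (digest is the well-known RECORD
+# encoding of sha256(b"")):
+#
+#     rehash("/path/to/empty-file")
+#     # -> ("sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU", "0")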
+
+def csv_io_kwargs(mode: str) -> Dict[str, Any]:
+ """Return keyword arguments to properly open a CSV file
+ in the given mode.
+ """
+ return {"mode": mode, "newline": "", "encoding": "utf-8"}
+
+
+def fix_script(path: str) -> bool:
+ """Replace #!python with #!/path/to/python
+ Return True if file was changed.
+ """
+ # XXX RECORD hashes will need to be updated
+ assert os.path.isfile(path)
+
+ with open(path, "rb") as script:
+ firstline = script.readline()
+ if not firstline.startswith(b"#!python"):
+ return False
+ exename = sys.executable.encode(sys.getfilesystemencoding())
+ firstline = b"#!" + exename + os.linesep.encode("ascii")
+ rest = script.read()
+ with open(path, "wb") as script:
+ script.write(firstline)
+ script.write(rest)
+ return True
+
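+# Illustrative effect (not real paths): a script whose first line is
+# b"#!python" has that line rewritten to the absolute interpreter path,
+# e.g. b"#!/usr/bin/python3"; files with any other first line are untouched.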
+
+def wheel_root_is_purelib(metadata: Message) -> bool:
+ return metadata.get("Root-Is-Purelib", "").lower() == "true"
+
+
+def get_entrypoints(dist: BaseDistribution) -> Tuple[Dict[str, str], Dict[str, str]]:
+ console_scripts = {}
+ gui_scripts = {}
+ for entry_point in dist.iter_entry_points():
+ if entry_point.group == "console_scripts":
+ console_scripts[entry_point.name] = entry_point.value
+ elif entry_point.group == "gui_scripts":
+ gui_scripts[entry_point.name] = entry_point.value
+ return console_scripts, gui_scripts
+
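+# Illustrative result for pip's own wheel (not real data):
+#
+#     console, gui = get_entrypoints(dist)
+#     # console == {"pip": "pip._internal.cli.main:main", ...}
+#     # gui == {}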
+
+def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
+ """Determine if any scripts are not on PATH and format a warning.
+ Returns a warning message if one or more scripts are not on PATH,
+ otherwise None.
+ """
+ if not scripts:
+ return None
+
+ # Group scripts by the path they were installed in
+ grouped_by_dir: Dict[str, Set[str]] = collections.defaultdict(set)
+ for destfile in scripts:
+ parent_dir = os.path.dirname(destfile)
+ script_name = os.path.basename(destfile)
+ grouped_by_dir[parent_dir].add(script_name)
+
+ # We don't want to warn for directories that are on PATH.
+ not_warn_dirs = [
+ os.path.normcase(os.path.normpath(i)).rstrip(os.sep)
+ for i in os.environ.get("PATH", "").split(os.pathsep)
+ ]
+ # If an executable sits with sys.executable, we don't warn for it.
+ # This covers the case of venv invocations without activating the venv.
+ not_warn_dirs.append(
+ os.path.normcase(os.path.normpath(os.path.dirname(sys.executable)))
+ )
+ warn_for: Dict[str, Set[str]] = {
+ parent_dir: scripts
+ for parent_dir, scripts in grouped_by_dir.items()
+ if os.path.normcase(os.path.normpath(parent_dir)) not in not_warn_dirs
+ }
+ if not warn_for:
+ return None
+
+ # Format a message
+ msg_lines = []
+ for parent_dir, dir_scripts in warn_for.items():
+ sorted_scripts: List[str] = sorted(dir_scripts)
+ if len(sorted_scripts) == 1:
+ start_text = f"script {sorted_scripts[0]} is"
+ else:
+ start_text = "scripts {} are".format(
+ ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
+ )
+
+ msg_lines.append(
+ f"The {start_text} installed in '{parent_dir}' which is not on PATH."
+ )
+
+ last_line_fmt = (
+ "Consider adding {} to PATH or, if you prefer "
+ "to suppress this warning, use --no-warn-script-location."
+ )
+ if len(msg_lines) == 1:
+ msg_lines.append(last_line_fmt.format("this directory"))
+ else:
+ msg_lines.append(last_line_fmt.format("these directories"))
+
+ # Add a note if any directory starts with ~
+ warn_for_tilde = any(
+ i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i
+ )
+ if warn_for_tilde:
+ tilde_warning_msg = (
+ "NOTE: The current PATH contains path(s) starting with `~`, "
+ "which may not be expanded by all applications."
+ )
+ msg_lines.append(tilde_warning_msg)
+
+ # Returns the formatted multiline message
+ return "\n".join(msg_lines)
+
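+# Illustrative warning (not real output):
+#
+#     The script foo is installed in '/home/user/.local/bin' which is not on PATH.
+#     Consider adding this directory to PATH or, if you prefer to suppress this
+#     warning, use --no-warn-script-location.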
+
+def _normalized_outrows(
+ outrows: Iterable[InstalledCSVRow],
+) -> List[Tuple[str, str, str]]:
+ """Normalize the given rows of a RECORD file.
+
+    Items in each row are converted into str. Rows are then sorted to make
+    the output more predictable for tests.
+
+ Each row is a 3-tuple (path, hash, size) and corresponds to a record of
+ a RECORD file (see PEP 376 and PEP 427 for details). For the rows
+ passed to this function, the size can be an integer as an int or string,
+ or the empty string.
+ """
+ # Normally, there should only be one row per path, in which case the
+ # second and third elements don't come into play when sorting.
+ # However, in cases in the wild where a path might happen to occur twice,
+ # we don't want the sort operation to trigger an error (but still want
+ # determinism). Since the third element can be an int or string, we
+ # coerce each element to a string to avoid a TypeError in this case.
+ # For additional background, see--
+ # https://github.com/pypa/pip/issues/5868
+ return sorted(
+ (record_path, hash_, str(size)) for record_path, hash_, size in outrows
+ )
+
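+# Illustrative behavior (not real data): sizes are coerced to str and rows
+# sorted lexicographically.
+#
+#     _normalized_outrows([("b", "h2", 2), ("a", "h1", "")])
+#     # -> [("a", "h1", ""), ("b", "h2", "2")]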
+
+def _record_to_fs_path(record_path: RecordPath, lib_dir: str) -> str:
+ return os.path.join(lib_dir, record_path)
+
+
+def _fs_to_record_path(path: str, lib_dir: str) -> RecordPath:
+ # On Windows, do not handle relative paths if they belong to different
+ # logical disks
+ if os.path.splitdrive(path)[0].lower() == os.path.splitdrive(lib_dir)[0].lower():
+ path = os.path.relpath(path, lib_dir)
+
+ path = path.replace(os.path.sep, "/")
+ return cast("RecordPath", path)
+
+
+def get_csv_rows_for_installed(
+ old_csv_rows: List[List[str]],
+ installed: Dict[RecordPath, RecordPath],
+ changed: Set[RecordPath],
+ generated: List[str],
+ lib_dir: str,
+) -> List[InstalledCSVRow]:
+ """
+ :param installed: A map from archive RECORD path to installation RECORD
+ path.
+ """
+ installed_rows: List[InstalledCSVRow] = []
+ for row in old_csv_rows:
+ if len(row) > 3:
+ logger.warning("RECORD line has more than three elements: %s", row)
+ old_record_path = cast("RecordPath", row[0])
+ new_record_path = installed.pop(old_record_path, old_record_path)
+ if new_record_path in changed:
+ digest, length = rehash(_record_to_fs_path(new_record_path, lib_dir))
+ else:
+ digest = row[1] if len(row) > 1 else ""
+ length = row[2] if len(row) > 2 else ""
+ installed_rows.append((new_record_path, digest, length))
+ for f in generated:
+ path = _fs_to_record_path(f, lib_dir)
+ digest, length = rehash(f)
+ installed_rows.append((path, digest, length))
+ return installed_rows + [
+ (installed_record_path, "", "") for installed_record_path in installed.values()
+ ]
+
+
+def get_console_script_specs(console: Dict[str, str]) -> List[str]:
+ """
+ Given the mapping from entrypoint name to callable, return the relevant
+ console script specs.
+ """
+ # Don't mutate caller's version
+ console = console.copy()
+
+ scripts_to_generate = []
+
+ # Special case pip and setuptools to generate versioned wrappers
+ #
+ # The issue is that some projects (specifically, pip and setuptools) use
+ # code in setup.py to create "versioned" entry points - pip2.7 on Python
+ # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
+ # the wheel metadata at build time, and so if the wheel is installed with
+ # a *different* version of Python the entry points will be wrong. The
+ # correct fix for this is to enhance the metadata to be able to describe
+ # such versioned entry points.
+ # Currently, projects using versioned entry points will either have
+ # incorrect versioned entry points, or they will not be able to distribute
+ # "universal" wheels (i.e., they will need a wheel per Python version).
+ #
+ # Because setuptools and pip are bundled with _ensurepip and virtualenv,
+ # we need to use universal wheels. As a workaround, we
+ # override the versioned entry points in the wheel and generate the
+ # correct ones.
+ #
+    # Adding to the level of hack in this section: in order to support
+    # ensurepip, this code looks for an ``ENSUREPIP_OPTIONS`` environment
+    # variable, which controls which versioned scripts get installed.
+ #
+ # ENSUREPIP_OPTIONS=altinstall
+ # - Only pipX.Y and easy_install-X.Y will be generated and installed
+ # ENSUREPIP_OPTIONS=install
+    #      - pipX.Y, pipX, easy_install-X.Y will be generated and installed.
+    #        Note that this is the behavior whenever ENSUREPIP_OPTIONS is set
+    #        to anything other than altinstall
+ # DEFAULT
+ # - The default behavior is to install pip, pipX, pipX.Y, easy_install
+ # and easy_install-X.Y.
+ pip_script = console.pop("pip", None)
+ if pip_script:
+ if "ENSUREPIP_OPTIONS" not in os.environ:
+ scripts_to_generate.append("pip = " + pip_script)
+
+ if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
+ scripts_to_generate.append(f"pip{sys.version_info[0]} = {pip_script}")
+
+ scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}")
+ # Delete any other versioned pip entry points
+ pip_ep = [k for k in console if re.match(r"pip(\d+(\.\d+)?)?$", k)]
+ for k in pip_ep:
+ del console[k]
+ easy_install_script = console.pop("easy_install", None)
+ if easy_install_script:
+ if "ENSUREPIP_OPTIONS" not in os.environ:
+ scripts_to_generate.append("easy_install = " + easy_install_script)
+
+ scripts_to_generate.append(
+ f"easy_install-{get_major_minor_version()} = {easy_install_script}"
+ )
+ # Delete any other versioned easy_install entry points
+ easy_install_ep = [
+ k for k in console if re.match(r"easy_install(-\d+\.\d+)?$", k)
+ ]
+ for k in easy_install_ep:
+ del console[k]
+
+ # Generate the console entry points specified in the wheel
+ scripts_to_generate.extend(starmap("{} = {}".format, console.items()))
+
+ return scripts_to_generate
+
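+# Illustrative result on Python 3.10 with ENSUREPIP_OPTIONS unset (not real
+# output):
+#
+#     get_console_script_specs({"pip": "pip._internal.cli.main:main"})
+#     # -> ["pip = pip._internal.cli.main:main",
+#     #     "pip3 = pip._internal.cli.main:main",
+#     #     "pip3.10 = pip._internal.cli.main:main"]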
+
+class ZipBackedFile:
+ def __init__(
+ self, src_record_path: RecordPath, dest_path: str, zip_file: ZipFile
+ ) -> None:
+ self.src_record_path = src_record_path
+ self.dest_path = dest_path
+ self._zip_file = zip_file
+ self.changed = False
+
+ def _getinfo(self) -> ZipInfo:
+ return self._zip_file.getinfo(self.src_record_path)
+
+ def save(self) -> None:
+ # When we open the output file below, any existing file is truncated
+ # before we start writing the new contents. This is fine in most
+ # cases, but can cause a segfault if pip has loaded a shared
+ # object (e.g. from pyopenssl through its vendored urllib3)
+ # Since the shared object is mmap'd an attempt to call a
+ # symbol in it will then cause a segfault. Unlinking the file
+ # allows writing of new contents while allowing the process to
+ # continue to use the old copy.
+ if os.path.exists(self.dest_path):
+ os.unlink(self.dest_path)
+
+ zipinfo = self._getinfo()
+
+        # optimization: the (empty) file is already created by open(), so
+        # skip decompression entirely when there are zero bytes to decompress.
+ with open(self.dest_path, "wb") as dest:
+ if zipinfo.file_size > 0:
+ with self._zip_file.open(zipinfo) as f:
+ blocksize = min(zipinfo.file_size, 1024 * 1024)
+ shutil.copyfileobj(f, dest, blocksize)
+
+ if zip_item_is_executable(zipinfo):
+ set_extracted_file_to_default_mode_plus_executable(self.dest_path)
+
+
+class ScriptFile:
+ def __init__(self, file: "File") -> None:
+ self._file = file
+ self.src_record_path = self._file.src_record_path
+ self.dest_path = self._file.dest_path
+ self.changed = False
+
+ def save(self) -> None:
+ self._file.save()
+ self.changed = fix_script(self.dest_path)
+
+
+class MissingCallableSuffix(InstallationError):
+ def __init__(self, entry_point: str) -> None:
+ super().__init__(
+ f"Invalid script entry point: {entry_point} - A callable "
+ "suffix is required. Cf https://packaging.python.org/"
+ "specifications/entry-points/#use-for-scripts for more "
+ "information."
+ )
+
+
+def _raise_for_invalid_entrypoint(specification: str) -> None:
+ entry = get_export_entry(specification)
+ if entry is not None and entry.suffix is None:
+ raise MissingCallableSuffix(str(entry))
+
+
+class PipScriptMaker(ScriptMaker):
+ def make(
+ self, specification: str, options: Optional[Dict[str, Any]] = None
+ ) -> List[str]:
+ _raise_for_invalid_entrypoint(specification)
+ return super().make(specification, options)
+
+
+def _install_wheel( # noqa: C901, PLR0915 function is too long
+ name: str,
+ wheel_zip: ZipFile,
+ wheel_path: str,
+ scheme: Scheme,
+ pycompile: bool = True,
+ warn_script_location: bool = True,
+ direct_url: Optional[DirectUrl] = None,
+ requested: bool = False,
+) -> None:
+ """Install a wheel.
+
+ :param name: Name of the project to install
+ :param wheel_zip: open ZipFile for wheel being installed
+ :param scheme: Distutils scheme dictating the install directories
+    :param wheel_path: Path to the wheel archive on disk
+ :param pycompile: Whether to byte-compile installed Python files
+ :param warn_script_location: Whether to check that scripts are installed
+ into a directory on PATH
+ :raises UnsupportedWheel:
+ * when the directory holds an unpacked wheel with incompatible
+ Wheel-Version
+ * when the .dist-info dir does not match the wheel
+ """
+ info_dir, metadata = parse_wheel(wheel_zip, name)
+
+ if wheel_root_is_purelib(metadata):
+ lib_dir = scheme.purelib
+ else:
+ lib_dir = scheme.platlib
+
+ # Record details of the files moved
+ # installed = files copied from the wheel to the destination
+ # changed = files changed while installing (scripts #! line typically)
+ # generated = files newly generated during the install (script wrappers)
+ installed: Dict[RecordPath, RecordPath] = {}
+ changed: Set[RecordPath] = set()
+ generated: List[str] = []
+
+ def record_installed(
+ srcfile: RecordPath, destfile: str, modified: bool = False
+ ) -> None:
+ """Map archive RECORD paths to installation RECORD paths."""
+ newpath = _fs_to_record_path(destfile, lib_dir)
+ installed[srcfile] = newpath
+ if modified:
+ changed.add(newpath)
+
+ def is_dir_path(path: RecordPath) -> bool:
+ return path.endswith("/")
+
+ def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None:
+ if not is_within_directory(dest_dir_path, target_path):
+ message = (
+ "The wheel {!r} has a file {!r} trying to install"
+ " outside the target directory {!r}"
+ )
+ raise InstallationError(
+ message.format(wheel_path, target_path, dest_dir_path)
+ )
+
+ def root_scheme_file_maker(
+ zip_file: ZipFile, dest: str
+ ) -> Callable[[RecordPath], "File"]:
+ def make_root_scheme_file(record_path: RecordPath) -> "File":
+ normed_path = os.path.normpath(record_path)
+ dest_path = os.path.join(dest, normed_path)
+ assert_no_path_traversal(dest, dest_path)
+ return ZipBackedFile(record_path, dest_path, zip_file)
+
+ return make_root_scheme_file
+
+ def data_scheme_file_maker(
+ zip_file: ZipFile, scheme: Scheme
+ ) -> Callable[[RecordPath], "File"]:
+ scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS}
+
+ def make_data_scheme_file(record_path: RecordPath) -> "File":
+ normed_path = os.path.normpath(record_path)
+ try:
+ _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
+ except ValueError:
+ message = (
+ f"Unexpected file in {wheel_path}: {record_path!r}. .data directory"
+ " contents should be named like: '/'."
+ )
+ raise InstallationError(message)
+
+ try:
+ scheme_path = scheme_paths[scheme_key]
+ except KeyError:
+ valid_scheme_keys = ", ".join(sorted(scheme_paths))
+ message = (
+ f"Unknown scheme key used in {wheel_path}: {scheme_key} "
+ f"(for file {record_path!r}). .data directory contents "
+ f"should be in subdirectories named with a valid scheme "
+ f"key ({valid_scheme_keys})"
+ )
+ raise InstallationError(message)
+
+ dest_path = os.path.join(scheme_path, dest_subpath)
+ assert_no_path_traversal(scheme_path, dest_path)
+ return ZipBackedFile(record_path, dest_path, zip_file)
+
+ return make_data_scheme_file
+
+ def is_data_scheme_path(path: RecordPath) -> bool:
+ return path.split("/", 1)[0].endswith(".data")
+
+ paths = cast(List[RecordPath], wheel_zip.namelist())
+ file_paths = filterfalse(is_dir_path, paths)
+ root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths)
+
+ make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir)
+ files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths)
+
+ def is_script_scheme_path(path: RecordPath) -> bool:
+ parts = path.split("/", 2)
+ return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts"
+
+ other_scheme_paths, script_scheme_paths = partition(
+ is_script_scheme_path, data_scheme_paths
+ )
+
+ make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme)
+ other_scheme_files = map(make_data_scheme_file, other_scheme_paths)
+ files = chain(files, other_scheme_files)
+
+ # Get the defined entry points
+ distribution = get_wheel_distribution(
+ FilesystemWheel(wheel_path),
+ canonicalize_name(name),
+ )
+ console, gui = get_entrypoints(distribution)
+
+ def is_entrypoint_wrapper(file: "File") -> bool:
+ # EP, EP.exe and EP-script.py are scripts generated for
+ # entry point EP by setuptools
+ path = file.dest_path
+ name = os.path.basename(path)
+ if name.lower().endswith(".exe"):
+ matchname = name[:-4]
+ elif name.lower().endswith("-script.py"):
+ matchname = name[:-10]
+ elif name.lower().endswith(".pya"):
+ matchname = name[:-4]
+ else:
+ matchname = name
+ # Ignore setuptools-generated scripts
+ return matchname in console or matchname in gui
+
+ script_scheme_files: Iterator[File] = map(
+ make_data_scheme_file, script_scheme_paths
+ )
+ script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files)
+ script_scheme_files = map(ScriptFile, script_scheme_files)
+ files = chain(files, script_scheme_files)
+
+ existing_parents = set()
+ for file in files:
+ # directory creation is lazy and after file filtering
+ # to ensure we don't install empty dirs; empty dirs can't be
+ # uninstalled.
+ parent_dir = os.path.dirname(file.dest_path)
+ if parent_dir not in existing_parents:
+ ensure_dir(parent_dir)
+ existing_parents.add(parent_dir)
+ file.save()
+ record_installed(file.src_record_path, file.dest_path, file.changed)
+
+ def pyc_source_file_paths() -> Generator[str, None, None]:
+ # We de-duplicate installation paths, since there can be overlap (e.g.
+ # file in .data maps to same location as file in wheel root).
+ # Sorting installation paths makes it easier to reproduce and debug
+ # issues related to permissions on existing files.
+ for installed_path in sorted(set(installed.values())):
+ full_installed_path = os.path.join(lib_dir, installed_path)
+ if not os.path.isfile(full_installed_path):
+ continue
+ if not full_installed_path.endswith(".py"):
+ continue
+ yield full_installed_path
+
+ def pyc_output_path(path: str) -> str:
+ """Return the path the pyc file would have been written to."""
+ return importlib.util.cache_from_source(path)
+
+ # Compile all of the pyc files for the installed files
+ if pycompile:
+ with contextlib.redirect_stdout(
+ StreamWrapper.from_stream(sys.stdout)
+ ) as stdout:
+ with warnings.catch_warnings():
+ warnings.filterwarnings("ignore")
+ for path in pyc_source_file_paths():
+ success = compileall.compile_file(path, force=True, quiet=True)
+ if success:
+ pyc_path = pyc_output_path(path)
+ assert os.path.exists(pyc_path)
+ pyc_record_path = cast(
+ "RecordPath", pyc_path.replace(os.path.sep, "/")
+ )
+ record_installed(pyc_record_path, pyc_path)
+ logger.debug(stdout.getvalue())
+
+ maker = PipScriptMaker(None, scheme.scripts)
+
+ # Ensure old scripts are overwritten.
+ # See https://github.com/pypa/pip/issues/1800
+ maker.clobber = True
+
+ # Ensure we don't generate any variants for scripts because this is almost
+ # never what somebody wants.
+ # See https://bitbucket.org/pypa/distlib/issue/35/
+ maker.variants = {""}
+
+ # This is required because otherwise distlib creates scripts that are not
+ # executable.
+ # See https://bitbucket.org/pypa/distlib/issue/32/
+ maker.set_mode = True
+
+ # Generate the console and GUI entry points specified in the wheel
+ scripts_to_generate = get_console_script_specs(console)
+
+ gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items()))
+
+ generated_console_scripts = maker.make_multiple(scripts_to_generate)
+ generated.extend(generated_console_scripts)
+
+ generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True}))
+
+ if warn_script_location:
+ msg = message_about_scripts_not_on_PATH(generated_console_scripts)
+ if msg is not None:
+ logger.warning(msg)
+
+ generated_file_mode = 0o666 & ~current_umask()
+
+ @contextlib.contextmanager
+ def _generate_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
+ with adjacent_tmp_file(path, **kwargs) as f:
+ yield f
+ os.chmod(f.name, generated_file_mode)
+ replace(f.name, path)
+
+ dest_info_dir = os.path.join(lib_dir, info_dir)
+
+ # Record pip as the installer
+ installer_path = os.path.join(dest_info_dir, "INSTALLER")
+ with _generate_file(installer_path) as installer_file:
+ installer_file.write(b"pip\n")
+ generated.append(installer_path)
+
+ # Record the PEP 610 direct URL reference
+ if direct_url is not None:
+ direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
+ with _generate_file(direct_url_path) as direct_url_file:
+ direct_url_file.write(direct_url.to_json().encode("utf-8"))
+ generated.append(direct_url_path)
+
+ # Record the REQUESTED file
+ if requested:
+ requested_path = os.path.join(dest_info_dir, "REQUESTED")
+ with open(requested_path, "wb"):
+ pass
+ generated.append(requested_path)
+
+ record_text = distribution.read_text("RECORD")
+ record_rows = list(csv.reader(record_text.splitlines()))
+
+ rows = get_csv_rows_for_installed(
+ record_rows,
+ installed=installed,
+ changed=changed,
+ generated=generated,
+ lib_dir=lib_dir,
+ )
+
+ # Record details of all files installed
+ record_path = os.path.join(dest_info_dir, "RECORD")
+
+ with _generate_file(record_path, **csv_io_kwargs("w")) as record_file:
+ # Explicitly cast to typing.IO[str] as a workaround for the mypy error:
+ # "writer" has incompatible type "BinaryIO"; expected "_Writer"
+ writer = csv.writer(cast("IO[str]", record_file))
+ writer.writerows(_normalized_outrows(rows))
+
+
+@contextlib.contextmanager
+def req_error_context(req_description: str) -> Generator[None, None, None]:
+ try:
+ yield
+ except InstallationError as e:
+ message = f"For req: {req_description}. {e.args[0]}"
+ raise InstallationError(message) from e
+
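+# Usage sketch (illustrative, not part of pip): any InstallationError raised
+# in the body is re-raised with the requirement description prepended.
+#
+#     with req_error_context("pkg==1.0 from https://example.com/pkg.whl"):
+#         _install_wheel(...)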
+
+def install_wheel(
+ name: str,
+ wheel_path: str,
+ scheme: Scheme,
+ req_description: str,
+ pycompile: bool = True,
+ warn_script_location: bool = True,
+ direct_url: Optional[DirectUrl] = None,
+ requested: bool = False,
+) -> None:
+ with ZipFile(wheel_path, allowZip64=True) as z:
+ with req_error_context(req_description):
+ _install_wheel(
+ name=name,
+ wheel_zip=z,
+ wheel_path=wheel_path,
+ scheme=scheme,
+ pycompile=pycompile,
+ warn_script_location=warn_script_location,
+ direct_url=direct_url,
+ requested=requested,
+ )
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/operations/prepare.py b/vllm/lib/python3.10/site-packages/pip/_internal/operations/prepare.py
new file mode 100644
index 0000000000000000000000000000000000000000..e6aa344720028f422840a720d26f5cfab358062c
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/operations/prepare.py
@@ -0,0 +1,732 @@
+"""Prepares a distribution for installation
+"""
+
+# The following comment should be removed at some point in the future.
+# mypy: strict-optional=False
+
+import mimetypes
+import os
+import shutil
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Dict, Iterable, List, Optional
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.distributions import make_distribution_for_install_requirement
+from pip._internal.distributions.installed import InstalledDistribution
+from pip._internal.exceptions import (
+ DirectoryUrlHashUnsupported,
+ HashMismatch,
+ HashUnpinned,
+ InstallationError,
+ MetadataInconsistent,
+ NetworkConnectionError,
+ VcsHashUnsupported,
+)
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution, get_metadata_distribution
+from pip._internal.models.direct_url import ArchiveInfo
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.network.download import BatchDownloader, Downloader
+from pip._internal.network.lazy_wheel import (
+ HTTPRangeRequestUnsupported,
+ dist_from_wheel_url,
+)
+from pip._internal.network.session import PipSession
+from pip._internal.operations.build.build_tracker import BuildTracker
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.utils._log import getLogger
+from pip._internal.utils.direct_url_helpers import (
+ direct_url_for_editable,
+ direct_url_from_link,
+)
+from pip._internal.utils.hashes import Hashes, MissingHashes
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import (
+ display_path,
+ hash_file,
+ hide_url,
+ redact_auth_from_requirement,
+)
+from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.unpacking import unpack_file
+from pip._internal.vcs import vcs
+
+logger = getLogger(__name__)
+
+
+def _get_prepared_distribution(
+ req: InstallRequirement,
+ build_tracker: BuildTracker,
+ finder: PackageFinder,
+ build_isolation: bool,
+ check_build_deps: bool,
+) -> BaseDistribution:
+ """Prepare a distribution for installation."""
+ abstract_dist = make_distribution_for_install_requirement(req)
+ tracker_id = abstract_dist.build_tracker_id
+ if tracker_id is not None:
+ with build_tracker.track(req, tracker_id):
+ abstract_dist.prepare_distribution_metadata(
+ finder, build_isolation, check_build_deps
+ )
+ return abstract_dist.get_metadata_distribution()
+
+
+def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
+ vcs_backend = vcs.get_backend_for_scheme(link.scheme)
+ assert vcs_backend is not None
+ vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)
+
+
+@dataclass
+class File:
+ path: str
+ content_type: Optional[str] = None
+
+ def __post_init__(self) -> None:
+ if self.content_type is None:
+ self.content_type = mimetypes.guess_type(self.path)[0]
+
+
+def get_http_url(
+ link: Link,
+ download: Downloader,
+ download_dir: Optional[str] = None,
+ hashes: Optional[Hashes] = None,
+) -> File:
+ temp_dir = TempDirectory(kind="unpack", globally_managed=True)
+ # If a download dir is specified, is the file already downloaded there?
+ already_downloaded_path = None
+ if download_dir:
+ already_downloaded_path = _check_download_dir(link, download_dir, hashes)
+
+ if already_downloaded_path:
+ from_path = already_downloaded_path
+ content_type = None
+ else:
+ # let's download to a tmp dir
+ from_path, content_type = download(link, temp_dir.path)
+ if hashes:
+ hashes.check_against_path(from_path)
+
+ return File(from_path, content_type)
+
+
+def get_file_url(
+ link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None
+) -> File:
+ """Get file and optionally check its hash."""
+ # If a download dir is specified, is the file already there and valid?
+ already_downloaded_path = None
+ if download_dir:
+ already_downloaded_path = _check_download_dir(link, download_dir, hashes)
+
+ if already_downloaded_path:
+ from_path = already_downloaded_path
+ else:
+ from_path = link.file_path
+
+ # If --require-hashes is off, `hashes` is either empty, the
+ # link's embedded hash, or MissingHashes; it is required to
+ # match. If --require-hashes is on, we are satisfied by any
+ # hash in `hashes` matching: a URL-based or an option-based
+ # one; no internet-sourced hash will be in `hashes`.
+ if hashes:
+ hashes.check_against_path(from_path)
+ return File(from_path, None)
+
+
+def unpack_url(
+ link: Link,
+ location: str,
+ download: Downloader,
+ verbosity: int,
+ download_dir: Optional[str] = None,
+ hashes: Optional[Hashes] = None,
+) -> Optional[File]:
+ """Unpack link into location, downloading if required.
+
+ :param hashes: A Hashes object, one of whose embedded hashes must match,
+ or HashMismatch will be raised. If the Hashes is empty, no matches are
+ required, and unhashable types of requirements (like VCS ones, which
+ would ordinarily raise HashUnsupported) are allowed.
+ """
+ # non-editable vcs urls
+ if link.is_vcs:
+ unpack_vcs_link(link, location, verbosity=verbosity)
+ return None
+
+ assert not link.is_existing_dir()
+
+ # file urls
+ if link.is_file:
+ file = get_file_url(link, download_dir, hashes=hashes)
+
+ # http urls
+ else:
+ file = get_http_url(
+ link,
+ download,
+ download_dir,
+ hashes=hashes,
+ )
+
+ # unpack the archive to the build dir location. even when only downloading
+ # archives, they have to be unpacked to parse dependencies, except wheels
+ if not link.is_wheel:
+ unpack_file(file.path, location, file.content_type)
+
+ return file
+
+
+def _check_download_dir(
+ link: Link,
+ download_dir: str,
+ hashes: Optional[Hashes],
+ warn_on_hash_mismatch: bool = True,
+) -> Optional[str]:
+ """Check download_dir for previously downloaded file with correct hash
+ If a correct file is found return its path else None
+ """
+ download_path = os.path.join(download_dir, link.filename)
+
+ if not os.path.exists(download_path):
+ return None
+
+ # If already downloaded, does its hash match?
+ logger.info("File was already downloaded %s", download_path)
+ if hashes:
+ try:
+ hashes.check_against_path(download_path)
+ except HashMismatch:
+ if warn_on_hash_mismatch:
+ logger.warning(
+ "Previously-downloaded file %s has bad hash. Re-downloading.",
+ download_path,
+ )
+ os.unlink(download_path)
+ return None
+ return download_path
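+
+# For example (paths hypothetical): for a link whose filename is
+# example_pkg-1.0-py3-none-any.whl and download_dir "/cache", this checks
+# "/cache/example_pkg-1.0-py3-none-any.whl" and verifies its hash.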
+
+
+class RequirementPreparer:
+ """Prepares a Requirement"""
+
+ def __init__(
+ self,
+ build_dir: str,
+ download_dir: Optional[str],
+ src_dir: str,
+ build_isolation: bool,
+ check_build_deps: bool,
+ build_tracker: BuildTracker,
+ session: PipSession,
+ progress_bar: str,
+ finder: PackageFinder,
+ require_hashes: bool,
+ use_user_site: bool,
+ lazy_wheel: bool,
+ verbosity: int,
+ legacy_resolver: bool,
+ ) -> None:
+ super().__init__()
+
+ self.src_dir = src_dir
+ self.build_dir = build_dir
+ self.build_tracker = build_tracker
+ self._session = session
+ self._download = Downloader(session, progress_bar)
+ self._batch_download = BatchDownloader(session, progress_bar)
+ self.finder = finder
+
+ # Where still-packed archives should be written to. If None, they are
+ # not saved, and are deleted immediately after unpacking.
+ self.download_dir = download_dir
+
+ # Is build isolation allowed?
+ self.build_isolation = build_isolation
+
+ # Should check build dependencies?
+ self.check_build_deps = check_build_deps
+
+ # Should hash-checking be required?
+ self.require_hashes = require_hashes
+
+ # Should install in user site-packages?
+ self.use_user_site = use_user_site
+
+ # Should wheels be downloaded lazily?
+ self.use_lazy_wheel = lazy_wheel
+
+ # How verbose should underlying tooling be?
+ self.verbosity = verbosity
+
+ # Are we using the legacy resolver?
+ self.legacy_resolver = legacy_resolver
+
+ # Memoized downloaded files, as mapping of url: path.
+ self._downloaded: Dict[str, str] = {}
+
+ # Previous "header" printed for a link-based InstallRequirement
+ self._previous_requirement_header = ("", "")
+
+ def _log_preparing_link(self, req: InstallRequirement) -> None:
+ """Provide context for the requirement being prepared."""
+ if req.link.is_file and not req.is_wheel_from_cache:
+ message = "Processing %s"
+ information = str(display_path(req.link.file_path))
+ else:
+ message = "Collecting %s"
+ information = redact_auth_from_requirement(req.req) if req.req else str(req)
+
+ # If we used req.req, inject requirement source if available (this
+ # would already be included if we used req directly)
+ if req.req and req.comes_from:
+ if isinstance(req.comes_from, str):
+ comes_from: Optional[str] = req.comes_from
+ else:
+ comes_from = req.comes_from.from_path()
+ if comes_from:
+ information += f" (from {comes_from})"
+
+ if (message, information) != self._previous_requirement_header:
+ self._previous_requirement_header = (message, information)
+ logger.info(message, information)
+
+ if req.is_wheel_from_cache:
+ with indent_log():
+ logger.info("Using cached %s", req.link.filename)
+
+ def _ensure_link_req_src_dir(
+ self, req: InstallRequirement, parallel_builds: bool
+ ) -> None:
+ """Ensure source_dir of a linked InstallRequirement."""
+ # source_dir is only pre-set for editable requirements, so for a
+ # non-editable linked requirement it must still be None here.
+ if req.link.is_wheel:
+ # We don't need to unpack wheels, so no need for a source
+ # directory.
+ return
+ assert req.source_dir is None
+ if req.link.is_existing_dir():
+ # build local directories in-tree
+ req.source_dir = req.link.file_path
+ return
+
+ # We always delete unpacked sdists after pip runs.
+ req.ensure_has_source_dir(
+ self.build_dir,
+ autodelete=True,
+ parallel_builds=parallel_builds,
+ )
+ req.ensure_pristine_source_checkout()
+
+ def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
+ # By the time this is called, the requirement's link should have
+ # been checked so we can tell what kind of requirements req is
+ # and raise some more informative errors than otherwise.
+ # (For example, we can raise VcsHashUnsupported for a VCS URL
+ # rather than HashMissing.)
+ if not self.require_hashes:
+ return req.hashes(trust_internet=True)
+
+ # We could check these first 2 conditions inside unpack_url
+ # and save repetition of conditions, but then we would
+ # report less-useful error messages for unhashable
+ # requirements, complaining that there's no hash provided.
+ if req.link.is_vcs:
+ raise VcsHashUnsupported()
+ if req.link.is_existing_dir():
+ raise DirectoryUrlHashUnsupported()
+
+ # Unpinned packages are asking for trouble when a new version
+ # is uploaded. This isn't a security check, but it saves users
+ # a surprising hash mismatch in the future.
+ # file:/// URLs aren't pinnable, so don't complain about them
+ # not being pinned.
+ if not req.is_direct and not req.is_pinned:
+ raise HashUnpinned()
+
+ # If known-good hashes are missing for this requirement,
+ # shim it with a facade object that will provoke hash
+ # computation and then raise a HashMissing exception
+ # showing the user what the hash should be.
+ return req.hashes(trust_internet=False) or MissingHashes()
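+
+ # Illustrative requirements-file line (name and digest hypothetical) that
+ # exercises this hash-checking path under --require-hashes:
+ #
+ # example-pkg==1.0 --hash=sha256:<64-hex-digest>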
+
+ def _fetch_metadata_only(
+ self,
+ req: InstallRequirement,
+ ) -> Optional[BaseDistribution]:
+ if self.legacy_resolver:
+ logger.debug(
+ "Metadata-only fetching is not used in the legacy resolver",
+ )
+ return None
+ if self.require_hashes:
+ logger.debug(
+ "Metadata-only fetching is not used as hash checking is required",
+ )
+ return None
+ # Try PEP 658 metadata first, then fall back to lazy wheel if unavailable.
+ return self._fetch_metadata_using_link_data_attr(
+ req
+ ) or self._fetch_metadata_using_lazy_wheel(req.link)
+
+ def _fetch_metadata_using_link_data_attr(
+ self,
+ req: InstallRequirement,
+ ) -> Optional[BaseDistribution]:
+ """Fetch metadata from the data-dist-info-metadata attribute, if possible."""
+ # (1) Get the link to the metadata file, if provided by the backend.
+ metadata_link = req.link.metadata_link()
+ if metadata_link is None:
+ return None
+ assert req.req is not None
+ logger.verbose(
+ "Obtaining dependency information for %s from %s",
+ req.req,
+ metadata_link,
+ )
+ # (2) Download the contents of the METADATA file, separate from the dist itself.
+ metadata_file = get_http_url(
+ metadata_link,
+ self._download,
+ hashes=metadata_link.as_hashes(),
+ )
+ with open(metadata_file.path, "rb") as f:
+ metadata_contents = f.read()
+ # (3) Generate a dist just from those file contents.
+ metadata_dist = get_metadata_distribution(
+ metadata_contents,
+ req.link.filename,
+ req.req.name,
+ )
+ # (4) Ensure the Name: field from the METADATA file matches the name from the
+ # install requirement.
+ #
+ # NB: raw_name will fall back to the name from the install requirement if
+ # the Name: field is not present, but it's noted in the raw_name docstring
+ # that that should NEVER happen anyway.
+ if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name):
+ raise MetadataInconsistent(
+ req, "Name", req.req.name, metadata_dist.raw_name
+ )
+ return metadata_dist
+
+ def _fetch_metadata_using_lazy_wheel(
+ self,
+ link: Link,
+ ) -> Optional[BaseDistribution]:
+ """Fetch metadata using lazy wheel, if possible."""
+ # --use-feature=fast-deps must be provided.
+ if not self.use_lazy_wheel:
+ return None
+ if link.is_file or not link.is_wheel:
+ logger.debug(
+ "Lazy wheel is not used as %r does not point to a remote wheel",
+ link,
+ )
+ return None
+
+ wheel = Wheel(link.filename)
+ name = canonicalize_name(wheel.name)
+ logger.info(
+ "Obtaining dependency information from %s %s",
+ name,
+ wheel.version,
+ )
+ url = link.url.split("#", 1)[0]
+ try:
+ return dist_from_wheel_url(name, url, self._session)
+ except HTTPRangeRequestUnsupported:
+ logger.debug("%s does not support range requests", url)
+ return None
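+
+ # Hedged illustration: dist_from_wheel_url relies on the server honouring
+ # HTTP Range requests; conceptually it reads only the tail of the file,
+ # where the zip central directory lives, e.g.
+ #
+ # request = urllib.request.Request(url, headers={"Range": "bytes=-8192"})
+ # tail = urllib.request.urlopen(request).read()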
+
+ def _complete_partial_requirements(
+ self,
+ partially_downloaded_reqs: Iterable[InstallRequirement],
+ parallel_builds: bool = False,
+ ) -> None:
+ """Download any requirements which were only fetched by metadata."""
+ # Download to a temporary directory. These will be copied over as
+ # needed for downstream 'download', 'wheel', and 'install' commands.
+ temp_dir = TempDirectory(kind="unpack", globally_managed=True).path
+
+ # Map each link to the requirement that owns it. This allows us to set
+ # `req.local_file_path` on the appropriate requirement after passing
+ # all the links at once into BatchDownloader.
+ links_to_fully_download: Dict[Link, InstallRequirement] = {}
+ for req in partially_downloaded_reqs:
+ assert req.link
+ links_to_fully_download[req.link] = req
+
+ batch_download = self._batch_download(
+ links_to_fully_download.keys(),
+ temp_dir,
+ )
+ for link, (filepath, _) in batch_download:
+ logger.debug("Downloading link %s to %s", link, filepath)
+ req = links_to_fully_download[link]
+ # Record the downloaded file path so wheel reqs can extract a Distribution
+ # in .get_dist().
+ req.local_file_path = filepath
+ # Record that the file is downloaded so we don't do it again in
+ # _prepare_linked_requirement().
+ self._downloaded[req.link.url] = filepath
+
+ # If this is an sdist, we need to unpack it after downloading, but the
+ # .source_dir won't be set up until we are in _prepare_linked_requirement().
+ # Add the downloaded archive to the install requirement to unpack after
+ # preparing the source dir.
+ if not req.is_wheel:
+ req.needs_unpacked_archive(Path(filepath))
+
+ # This step is necessary to ensure all lazy wheels are processed
+ # successfully by the 'download', 'wheel', and 'install' commands.
+ for req in partially_downloaded_reqs:
+ self._prepare_linked_requirement(req, parallel_builds)
+
+ def prepare_linked_requirement(
+ self, req: InstallRequirement, parallel_builds: bool = False
+ ) -> BaseDistribution:
+ """Prepare a requirement to be obtained from req.link."""
+ assert req.link
+ self._log_preparing_link(req)
+ with indent_log():
+ # Check if the relevant file is already available
+ # in the download directory
+ file_path = None
+ if self.download_dir is not None and req.link.is_wheel:
+ hashes = self._get_linked_req_hashes(req)
+ file_path = _check_download_dir(
+ req.link,
+ self.download_dir,
+ hashes,
+ # When a locally built wheel has been found in cache, we don't warn
+ # about re-downloading when the already downloaded wheel hash does
+ # not match. This is because the hash must be checked against the
+ # original link, not the cached link. In that case the already
+ # downloaded file will be removed and re-fetched from cache (which
+ # implies a hash check against the cache entry's origin.json).
+ warn_on_hash_mismatch=not req.is_wheel_from_cache,
+ )
+
+ if file_path is not None:
+ # The file is already available, so mark it as downloaded
+ self._downloaded[req.link.url] = file_path
+ else:
+ # The file is not available, attempt to fetch only metadata
+ metadata_dist = self._fetch_metadata_only(req)
+ if metadata_dist is not None:
+ req.needs_more_preparation = True
+ return metadata_dist
+
+ # None of the optimizations worked, fully prepare the requirement
+ return self._prepare_linked_requirement(req, parallel_builds)
+
+ def prepare_linked_requirements_more(
+ self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
+ ) -> None:
+ """Prepare linked requirements more, if needed."""
+ reqs = [req for req in reqs if req.needs_more_preparation]
+ for req in reqs:
+ # Determine if any of these requirements were already downloaded.
+ if self.download_dir is not None and req.link.is_wheel:
+ hashes = self._get_linked_req_hashes(req)
+ file_path = _check_download_dir(req.link, self.download_dir, hashes)
+ if file_path is not None:
+ self._downloaded[req.link.url] = file_path
+ req.needs_more_preparation = False
+
+ # Prepare requirements we found were already downloaded for some
+ # reason. The other downloads will be completed separately.
+ partially_downloaded_reqs: List[InstallRequirement] = []
+ for req in reqs:
+ if req.needs_more_preparation:
+ partially_downloaded_reqs.append(req)
+ else:
+ self._prepare_linked_requirement(req, parallel_builds)
+
+ # TODO: separate this part out from RequirementPreparer when the v1
+ # resolver can be removed!
+ self._complete_partial_requirements(
+ partially_downloaded_reqs,
+ parallel_builds=parallel_builds,
+ )
+
+ def _prepare_linked_requirement(
+ self, req: InstallRequirement, parallel_builds: bool
+ ) -> BaseDistribution:
+ assert req.link
+ link = req.link
+
+ hashes = self._get_linked_req_hashes(req)
+
+ if hashes and req.is_wheel_from_cache:
+ assert req.download_info is not None
+ assert link.is_wheel
+ assert link.is_file
+ # We need to verify hashes, and we have found the requirement in the cache
+ # of locally built wheels.
+ if (
+ isinstance(req.download_info.info, ArchiveInfo)
+ and req.download_info.info.hashes
+ and hashes.has_one_of(req.download_info.info.hashes)
+ ):
+ # At this point we know the requirement was built from a hashable source
+ # artifact, and we verified that the cache entry's hash of the original
+ # artifact matches one of the hashes we expect. We don't verify hashes
+ # against the cached wheel, because the wheel is not the original.
+ hashes = None
+ else:
+ logger.warning(
+ "The hashes of the source archive found in cache entry "
+ "don't match, ignoring cached built wheel "
+ "and re-downloading source."
+ )
+ req.link = req.cached_wheel_source_link
+ link = req.link
+
+ self._ensure_link_req_src_dir(req, parallel_builds)
+
+ if link.is_existing_dir():
+ local_file = None
+ elif link.url not in self._downloaded:
+ try:
+ local_file = unpack_url(
+ link,
+ req.source_dir,
+ self._download,
+ self.verbosity,
+ self.download_dir,
+ hashes,
+ )
+ except NetworkConnectionError as exc:
+ raise InstallationError(
+ f"Could not install requirement {req} because of HTTP "
+ f"error {exc} for URL {link}"
+ )
+ else:
+ file_path = self._downloaded[link.url]
+ if hashes:
+ hashes.check_against_path(file_path)
+ local_file = File(file_path, content_type=None)
+
+ # If download_info is set, we got it from the wheel cache.
+ if req.download_info is None:
+ # Editables don't go through this function (see
+ # prepare_editable_requirement).
+ assert not req.editable
+ req.download_info = direct_url_from_link(link, req.source_dir)
+ # Make sure we have a hash in download_info. If we got it as part of the
+ # URL, it will have been verified and we can rely on it. Otherwise we
+ # compute it from the downloaded file.
+ # FIXME: https://github.com/pypa/pip/issues/11943
+ if (
+ isinstance(req.download_info.info, ArchiveInfo)
+ and not req.download_info.info.hashes
+ and local_file
+ ):
+ hash = hash_file(local_file.path)[0].hexdigest()
+ # We populate info.hash for backward compatibility.
+ # This will automatically populate info.hashes.
+ req.download_info.info.hash = f"sha256={hash}"
+
+ # For use in later processing,
+ # preserve the file path on the requirement.
+ if local_file:
+ req.local_file_path = local_file.path
+
+ dist = _get_prepared_distribution(
+ req,
+ self.build_tracker,
+ self.finder,
+ self.build_isolation,
+ self.check_build_deps,
+ )
+ return dist
+
+ def save_linked_requirement(self, req: InstallRequirement) -> None:
+ assert self.download_dir is not None
+ assert req.link is not None
+ link = req.link
+ if link.is_vcs or (link.is_existing_dir() and req.editable):
+ # Make a .zip of the source_dir we already created.
+ req.archive(self.download_dir)
+ return
+
+ if link.is_existing_dir():
+ logger.debug(
+ "Not copying link to destination directory "
+ "since it is a directory: %s",
+ link,
+ )
+ return
+ if req.local_file_path is None:
+ # No distribution was downloaded for this requirement.
+ return
+
+ download_location = os.path.join(self.download_dir, link.filename)
+ if not os.path.exists(download_location):
+ shutil.copy(req.local_file_path, download_location)
+ download_path = display_path(download_location)
+ logger.info("Saved %s", download_path)
+
+ def prepare_editable_requirement(
+ self,
+ req: InstallRequirement,
+ ) -> BaseDistribution:
+ """Prepare an editable requirement."""
+ assert req.editable, "cannot prepare a non-editable req as editable"
+
+ logger.info("Obtaining %s", req)
+
+ with indent_log():
+ if self.require_hashes:
+ raise InstallationError(
+ f"The editable requirement {req} cannot be installed when "
+ "requiring hashes, because there is no single file to "
+ "hash."
+ )
+ req.ensure_has_source_dir(self.src_dir)
+ req.update_editable()
+ assert req.source_dir
+ req.download_info = direct_url_for_editable(req.unpacked_source_directory)
+
+ dist = _get_prepared_distribution(
+ req,
+ self.build_tracker,
+ self.finder,
+ self.build_isolation,
+ self.check_build_deps,
+ )
+
+ req.check_if_exists(self.use_user_site)
+
+ return dist
+
+ def prepare_installed_requirement(
+ self,
+ req: InstallRequirement,
+ skip_reason: str,
+ ) -> BaseDistribution:
+ """Prepare an already-installed requirement."""
+ assert req.satisfied_by, "req should have been satisfied but isn't"
+ assert skip_reason is not None, (
+ "did not get skip reason skipped but req.satisfied_by "
+ f"is set to {req.satisfied_by}"
+ )
+ logger.info(
+ "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
+ )
+ with indent_log():
+ if self.require_hashes:
+ logger.debug(
+ "Since it is already installed, we are trusting this "
+ "package without checking its hash. To ensure a "
+ "completely repeatable environment, install into an "
+ "empty virtualenv."
+ )
+ return InstalledDistribution(req).get_metadata_distribution()
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/base.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/base.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3fc14c4b2f70ecfb97c35a39bdb6ead754d2da99
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/base.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3e9a1f300faff4f41b5b5ba04f4507a1dd07a33b
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..cd0067e6b7c5f4e33974224610a0fc4d5fafaf70
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/resolution/legacy/resolver.py b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/legacy/resolver.py
new file mode 100644
index 0000000000000000000000000000000000000000..1dd0d7041bb7a0a32bdf22f825c52f87276e5e07
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/legacy/resolver.py
@@ -0,0 +1,597 @@
+"""Dependency Resolution
+
+The dependency resolution in pip is performed as follows:
+
+for top-level requirements:
+ a. only one spec allowed per project, regardless of conflicts or not.
+ otherwise a "double requirement" exception is raised
+ b. they override sub-dependency requirements.
+for sub-dependencies
+ a. "first found, wins" (where the order is breadth first)
+"""
+
+import logging
+import sys
+from collections import defaultdict
+from itertools import chain
+from typing import DefaultDict, Iterable, List, Optional, Set, Tuple
+
+from pip._vendor.packaging import specifiers
+from pip._vendor.packaging.requirements import Requirement
+
+from pip._internal.cache import WheelCache
+from pip._internal.exceptions import (
+ BestVersionAlreadyInstalled,
+ DistributionNotFound,
+ HashError,
+ HashErrors,
+ InstallationError,
+ NoneMetadataError,
+ UnsupportedPythonVersion,
+)
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req.req_install import (
+ InstallRequirement,
+ check_invalid_constraint_type,
+)
+from pip._internal.req.req_set import RequirementSet
+from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
+from pip._internal.utils import compatibility_tags
+from pip._internal.utils.compatibility_tags import get_supported
+from pip._internal.utils.direct_url_helpers import direct_url_from_link
+from pip._internal.utils.logging import indent_log
+from pip._internal.utils.misc import normalize_version_info
+from pip._internal.utils.packaging import check_requires_python
+
+logger = logging.getLogger(__name__)
+
+DiscoveredDependencies = DefaultDict[Optional[str], List[InstallRequirement]]
+
+
+def _check_dist_requires_python(
+ dist: BaseDistribution,
+ version_info: Tuple[int, int, int],
+ ignore_requires_python: bool = False,
+) -> None:
+ """
+ Check whether the given Python version is compatible with a distribution's
+ "Requires-Python" value.
+
+ :param version_info: A 3-tuple of ints representing the Python
+ major-minor-micro version to check.
+ :param ignore_requires_python: Whether to ignore the "Requires-Python"
+ value if the given Python version isn't compatible.
+
+ :raises UnsupportedPythonVersion: When the given Python version isn't
+ compatible.
+ """
+ # This idiosyncratically converts the SpecifierSet to str and lets
+ # check_requires_python parse it back into a SpecifierSet. But this
+ # is the legacy resolver so I'm just not going to bother refactoring.
+ try:
+ requires_python = str(dist.requires_python)
+ except FileNotFoundError as e:
+ raise NoneMetadataError(dist, str(e))
+ try:
+ is_compatible = check_requires_python(
+ requires_python,
+ version_info=version_info,
+ )
+ except specifiers.InvalidSpecifier as exc:
+ logger.warning(
+ "Package %r has an invalid Requires-Python: %s", dist.raw_name, exc
+ )
+ return
+
+ if is_compatible:
+ return
+
+ version = ".".join(map(str, version_info))
+ if ignore_requires_python:
+ logger.debug(
+ "Ignoring failed Requires-Python check for package %r: %s not in %r",
+ dist.raw_name,
+ version,
+ requires_python,
+ )
+ return
+
+ raise UnsupportedPythonVersion(
+ f"Package {dist.raw_name!r} requires a different Python: "
+ f"{version} not in {requires_python!r}"
+ )
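+
+# Illustrative check (values hypothetical): for version_info (3, 10, 4) and a
+# distribution declaring Requires-Python ">=3.8", this reduces to
+# SpecifierSet(">=3.8").contains("3.10.4"), which is True.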
+
+
+class Resolver(BaseResolver):
+ """Resolves which packages need to be installed/uninstalled to perform \
+ the requested operation without breaking the requirements of any package.
+ """
+
+ _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
+
+ def __init__(
+ self,
+ preparer: RequirementPreparer,
+ finder: PackageFinder,
+ wheel_cache: Optional[WheelCache],
+ make_install_req: InstallRequirementProvider,
+ use_user_site: bool,
+ ignore_dependencies: bool,
+ ignore_installed: bool,
+ ignore_requires_python: bool,
+ force_reinstall: bool,
+ upgrade_strategy: str,
+ py_version_info: Optional[Tuple[int, ...]] = None,
+ ) -> None:
+ super().__init__()
+ assert upgrade_strategy in self._allowed_strategies
+
+ if py_version_info is None:
+ py_version_info = sys.version_info[:3]
+ else:
+ py_version_info = normalize_version_info(py_version_info)
+
+ self._py_version_info = py_version_info
+
+ self.preparer = preparer
+ self.finder = finder
+ self.wheel_cache = wheel_cache
+
+ self.upgrade_strategy = upgrade_strategy
+ self.force_reinstall = force_reinstall
+ self.ignore_dependencies = ignore_dependencies
+ self.ignore_installed = ignore_installed
+ self.ignore_requires_python = ignore_requires_python
+ self.use_user_site = use_user_site
+ self._make_install_req = make_install_req
+
+ self._discovered_dependencies: DiscoveredDependencies = defaultdict(list)
+
+ def resolve(
+ self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
+ ) -> RequirementSet:
+ """Resolve what operations need to be done
+
+ As a side-effect of this method, the packages (and their dependencies)
+ are downloaded, unpacked and prepared for installation. This
+ preparation is done by ``pip.operations.prepare``.
+
+ Once PyPI has static dependency metadata available, it would be
+ possible to move the preparation to become a step separated from
+ dependency resolution.
+ """
+ requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels)
+ for req in root_reqs:
+ if req.constraint:
+ check_invalid_constraint_type(req)
+ self._add_requirement_to_set(requirement_set, req)
+
+ # Actually prepare the files, and collect any exceptions. Most hash
+ # exceptions cannot be checked ahead of time, because
+ # _populate_link() needs to be called before we can make decisions
+ # based on link type.
+ discovered_reqs: List[InstallRequirement] = []
+ hash_errors = HashErrors()
+ for req in chain(requirement_set.all_requirements, discovered_reqs):
+ try:
+ discovered_reqs.extend(self._resolve_one(requirement_set, req))
+ except HashError as exc:
+ exc.req = req
+ hash_errors.append(exc)
+
+ if hash_errors:
+ raise hash_errors
+
+ return requirement_set
+
+ def _add_requirement_to_set(
+ self,
+ requirement_set: RequirementSet,
+ install_req: InstallRequirement,
+ parent_req_name: Optional[str] = None,
+ extras_requested: Optional[Iterable[str]] = None,
+ ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]:
+ """Add install_req as a requirement to install.
+
+ :param parent_req_name: The name of the requirement that needed this
+ added. The name is used because when multiple unnamed requirements
+ resolve to the same name, we could otherwise end up with dependency
+ links that point outside the Requirements set. parent_req must
+ already be added. Note that None implies that this is a user
+ supplied requirement, vs an inferred one.
+ :param extras_requested: an iterable of extras used to evaluate the
+ environment markers.
+ :return: Additional requirements to scan. That is either [] if
+ the requirement is not applicable, or [install_req] if the
+ requirement is applicable and has just been added.
+ """
+ # If the markers do not match, ignore this requirement.
+ if not install_req.match_markers(extras_requested):
+ logger.info(
+ "Ignoring %s: markers '%s' don't match your environment",
+ install_req.name,
+ install_req.markers,
+ )
+ return [], None
+
+ # If the wheel is not supported, raise an error.
+ # Should check this after filtering out based on environment markers to
+ # allow specifying different wheels based on the environment/OS, in a
+ # single requirements file.
+ if install_req.link and install_req.link.is_wheel:
+ wheel = Wheel(install_req.link.filename)
+ tags = compatibility_tags.get_supported()
+ if requirement_set.check_supported_wheels and not wheel.supported(tags):
+ raise InstallationError(
+ f"{wheel.filename} is not a supported wheel on this platform."
+ )
+
+ # This next bit is really a sanity check.
+ assert (
+ not install_req.user_supplied or parent_req_name is None
+ ), "a user supplied req shouldn't have a parent"
+
+ # Unnamed requirements are scanned again and the requirement won't be
+ # added as a dependency until after scanning.
+ if not install_req.name:
+ requirement_set.add_unnamed_requirement(install_req)
+ return [install_req], None
+
+ try:
+ existing_req: Optional[InstallRequirement] = (
+ requirement_set.get_requirement(install_req.name)
+ )
+ except KeyError:
+ existing_req = None
+
+ has_conflicting_requirement = (
+ parent_req_name is None
+ and existing_req
+ and not existing_req.constraint
+ and existing_req.extras == install_req.extras
+ and existing_req.req
+ and install_req.req
+ and existing_req.req.specifier != install_req.req.specifier
+ )
+ if has_conflicting_requirement:
+ raise InstallationError(
+ f"Double requirement given: {install_req} "
+ f"(already in {existing_req}, name={install_req.name!r})"
+ )
+
+ # When no existing requirement exists, add the requirement as a
+ # dependency and it will be scanned again after.
+ if not existing_req:
+ requirement_set.add_named_requirement(install_req)
+ # We'd want to rescan this requirement later
+ return [install_req], install_req
+
+ # Assume there's no need to scan: we've already encountered this
+ # requirement during scanning.
+ if install_req.constraint or not existing_req.constraint:
+ return [], existing_req
+
+ does_not_satisfy_constraint = install_req.link and not (
+ existing_req.link and install_req.link.path == existing_req.link.path
+ )
+ if does_not_satisfy_constraint:
+ raise InstallationError(
+ f"Could not satisfy constraints for '{install_req.name}': "
+ "installation from path or url cannot be "
+ "constrained to a version"
+ )
+ # If we're now installing a constraint, mark the existing
+ # object for real installation.
+ existing_req.constraint = False
+ # If we're now installing a user supplied requirement,
+ # mark the existing object as such.
+ if install_req.user_supplied:
+ existing_req.user_supplied = True
+ existing_req.extras = tuple(
+ sorted(set(existing_req.extras) | set(install_req.extras))
+ )
+ logger.debug(
+ "Setting %s extras to: %s",
+ existing_req,
+ existing_req.extras,
+ )
+ # Return the existing requirement for addition to the parent and
+ # scanning again.
+ return [existing_req], existing_req
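+
+ # Illustrative example of the "double requirement" rule (names
+ # hypothetical): `pip install "pkg==1.0" "pkg==2.0"` reaches the
+ # has_conflicting_requirement branch above and raises InstallationError.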
+
+ def _is_upgrade_allowed(self, req: InstallRequirement) -> bool:
+ if self.upgrade_strategy == "to-satisfy-only":
+ return False
+ elif self.upgrade_strategy == "eager":
+ return True
+ else:
+ assert self.upgrade_strategy == "only-if-needed"
+ return req.user_supplied or req.constraint
+
+ def _set_req_to_reinstall(self, req: InstallRequirement) -> None:
+ """
+ Set a requirement to be installed.
+ """
+ # Don't uninstall the conflict if doing a user install and the
+ # conflict is not a user install.
+ assert req.satisfied_by is not None
+ if not self.use_user_site or req.satisfied_by.in_usersite:
+ req.should_reinstall = True
+ req.satisfied_by = None
+
+ def _check_skip_installed(
+ self, req_to_install: InstallRequirement
+ ) -> Optional[str]:
+ """Check if req_to_install should be skipped.
+
+ This will check if the req is installed, and whether we should upgrade
+ or reinstall it, taking into account all the relevant user options.
+
+ After calling this req_to_install will only have satisfied_by set to
+ None if the req_to_install is to be upgraded/reinstalled etc. Any
+ other value will be a dist recording the current thing installed that
+ satisfies the requirement.
+
+ Note that for vcs urls and the like we can't assess skipping in this
+ routine - we simply identify that we need to pull the thing down,
+ then later on it is pulled down and introspected to assess upgrade/
+ reinstalls etc.
+
+ :return: A text reason for why it was skipped, or None.
+ """
+ if self.ignore_installed:
+ return None
+
+ req_to_install.check_if_exists(self.use_user_site)
+ if not req_to_install.satisfied_by:
+ return None
+
+ if self.force_reinstall:
+ self._set_req_to_reinstall(req_to_install)
+ return None
+
+ if not self._is_upgrade_allowed(req_to_install):
+ if self.upgrade_strategy == "only-if-needed":
+ return "already satisfied, skipping upgrade"
+ return "already satisfied"
+
+ # Check for the possibility of an upgrade. For link-based
+ # requirements we have to pull the tree down and inspect it to assess
+ # the version number, so it's handled further down.
+ if not req_to_install.link:
+ try:
+ self.finder.find_requirement(req_to_install, upgrade=True)
+ except BestVersionAlreadyInstalled:
+ # Then the best version is installed.
+ return "already up-to-date"
+ except DistributionNotFound:
+ # No distribution found, so we squash the error. It will
+ # be raised later when we re-try later to do the install.
+ # Why don't we just raise here?
+ pass
+
+ self._set_req_to_reinstall(req_to_install)
+ return None
+
+ def _find_requirement_link(self, req: InstallRequirement) -> Optional[Link]:
+ upgrade = self._is_upgrade_allowed(req)
+ best_candidate = self.finder.find_requirement(req, upgrade)
+ if not best_candidate:
+ return None
+
+ # Log a warning per PEP 592 if necessary before returning.
+ link = best_candidate.link
+ if link.is_yanked:
+ reason = link.yanked_reason or ""
+ msg = (
+ # Mark this as a unicode string to prevent
+ # "UnicodeEncodeError: 'ascii' codec can't encode character"
+ # in Python 2 when the reason contains non-ascii characters.
+ "The candidate selected for download or install is a "
+ f"yanked version: {best_candidate}\n"
+ f"Reason for being yanked: {reason}"
+ )
+ logger.warning(msg)
+
+ return link
+
+ def _populate_link(self, req: InstallRequirement) -> None:
+ """Ensure that if a link can be found for this, that it is found.
+
+ Note that req.link may still be None - if the requirement is already
+ installed and not needed to be upgraded based on the return value of
+ _is_upgrade_allowed().
+
+ If preparer.require_hashes is True, don't use the wheel cache, because
+ cached wheels, always built locally, have different hashes than the
+ files downloaded from the index server and thus throw false hash
+ # mismatches. Furthermore, cached wheels at present have nondeterministic
+ contents due to file modification times.
+ """
+ if req.link is None:
+ req.link = self._find_requirement_link(req)
+
+ if self.wheel_cache is None or self.preparer.require_hashes:
+ return
+
+ assert req.link is not None, "_find_requirement_link unexpectedly returned None"
+ cache_entry = self.wheel_cache.get_cache_entry(
+ link=req.link,
+ package_name=req.name,
+ supported_tags=get_supported(),
+ )
+ if cache_entry is not None:
+ logger.debug("Using cached wheel link: %s", cache_entry.link)
+ if req.link is req.original_link and cache_entry.persistent:
+ req.cached_wheel_source_link = req.link
+ if cache_entry.origin is not None:
+ req.download_info = cache_entry.origin
+ else:
+ # Legacy cache entry that does not have origin.json.
+ # download_info may miss the archive_info.hashes field.
+ req.download_info = direct_url_from_link(
+ req.link, link_is_in_wheel_cache=cache_entry.persistent
+ )
+ req.link = cache_entry.link
+
+ def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution:
+ """Takes a InstallRequirement and returns a single AbstractDist \
+ representing a prepared variant of the same.
+ """
+ if req.editable:
+ return self.preparer.prepare_editable_requirement(req)
+
+ # satisfied_by is only evaluated by calling _check_skip_installed,
+ # so it must be None here.
+ assert req.satisfied_by is None
+ skip_reason = self._check_skip_installed(req)
+
+ if req.satisfied_by:
+ return self.preparer.prepare_installed_requirement(req, skip_reason)
+
+ # We eagerly populate the link, since that's our "legacy" behavior.
+ self._populate_link(req)
+ dist = self.preparer.prepare_linked_requirement(req)
+
+ # NOTE
+ # The following portion is for determining if a certain package is
+ # going to be re-installed/upgraded or not and reporting to the user.
+ # This should probably get cleaned up in a future refactor.
+
+ # req.req is only available after unpacking for URL packages, so
+ # repeat check_if_exists to support uninstall-on-upgrade (#14).
+ if not self.ignore_installed:
+ req.check_if_exists(self.use_user_site)
+
+ if req.satisfied_by:
+ should_modify = (
+ self.upgrade_strategy != "to-satisfy-only"
+ or self.force_reinstall
+ or self.ignore_installed
+ or req.link.scheme == "file"
+ )
+ if should_modify:
+ self._set_req_to_reinstall(req)
+ else:
+ logger.info(
+ "Requirement already satisfied (use --upgrade to upgrade): %s",
+ req,
+ )
+ return dist
+
+ def _resolve_one(
+ self,
+ requirement_set: RequirementSet,
+ req_to_install: InstallRequirement,
+ ) -> List[InstallRequirement]:
+ """Prepare a single requirements file.
+
+ :return: A list of additional InstallRequirements to also install.
+ """
+ # Tell user what we are doing for this requirement:
+ # obtain (editable), skipping, processing (local url), collecting
+ # (remote url or package name)
+ if req_to_install.constraint or req_to_install.prepared:
+ return []
+
+ req_to_install.prepared = True
+
+ # Parse and return dependencies
+ dist = self._get_dist_for(req_to_install)
+ # This will raise UnsupportedPythonVersion if the given Python
+ # version isn't compatible with the distribution's Requires-Python.
+ _check_dist_requires_python(
+ dist,
+ version_info=self._py_version_info,
+ ignore_requires_python=self.ignore_requires_python,
+ )
+
+ more_reqs: List[InstallRequirement] = []
+
+ def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None:
+ # This idiosyncratically converts the Requirement to str and lets
+ # make_install_req parse it back into a Requirement. But this is
+ # the legacy resolver so I'm just not going to bother refactoring.
+ sub_install_req = self._make_install_req(str(subreq), req_to_install)
+ parent_req_name = req_to_install.name
+ to_scan_again, add_to_parent = self._add_requirement_to_set(
+ requirement_set,
+ sub_install_req,
+ parent_req_name=parent_req_name,
+ extras_requested=extras_requested,
+ )
+ if parent_req_name and add_to_parent:
+ self._discovered_dependencies[parent_req_name].append(add_to_parent)
+ more_reqs.extend(to_scan_again)
+
+ with indent_log():
+ # We add req_to_install before its dependencies, so that we
+ # can refer to it when adding dependencies.
+ assert req_to_install.name is not None
+ if not requirement_set.has_requirement(req_to_install.name):
+ # 'unnamed' requirements will get added here
+ # 'unnamed' requirements can only come from being directly
+ # provided by the user.
+ assert req_to_install.user_supplied
+ self._add_requirement_to_set(
+ requirement_set, req_to_install, parent_req_name=None
+ )
+
+ if not self.ignore_dependencies:
+ if req_to_install.extras:
+ logger.debug(
+ "Installing extra requirements: %r",
+ ",".join(req_to_install.extras),
+ )
+ missing_requested = sorted(
+ set(req_to_install.extras) - set(dist.iter_provided_extras())
+ )
+ for missing in missing_requested:
+ logger.warning(
+ "%s %s does not provide the extra '%s'",
+ dist.raw_name,
+ dist.version,
+ missing,
+ )
+
+ available_requested = sorted(
+ set(dist.iter_provided_extras()) & set(req_to_install.extras)
+ )
+ for subreq in dist.iter_dependencies(available_requested):
+ add_req(subreq, extras_requested=available_requested)
+
+ return more_reqs
+
+ def get_installation_order(
+ self, req_set: RequirementSet
+ ) -> List[InstallRequirement]:
+ """Create the installation order.
+
+ The installation order is topological - requirements are installed
+ before the requiring thing. We break cycles at an arbitrary point,
+ and make no other guarantees.
+ """
+ # The current implementation, which we may change at any point,
+ # installs the user-specified things in the order given, except when
+ # dependencies must come earlier to achieve topological order.
+ order = []
+ ordered_reqs: Set[InstallRequirement] = set()
+
+ def schedule(req: InstallRequirement) -> None:
+ if req.satisfied_by or req in ordered_reqs:
+ return
+ if req.constraint:
+ return
+ ordered_reqs.add(req)
+ for dep in self._discovered_dependencies[req.name]:
+ schedule(dep)
+ order.append(req)
+
+ for install_req in req_set.requirements.values():
+ schedule(install_req)
+ return order
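+
+ # Illustrative example (names hypothetical): if the user asks for A and
+ # resolution discovered A -> B -> C, schedule(A) recurses into B and C
+ # before appending, so the resulting order is [C, B, A].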
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0f7128855b4a98550d8c02846c0101743e9810bd
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bb74f8505bbaddab5e8a06e5059d48284ea1bdeb
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f385cce866aba74cde4f6a1cd252ef8b1431e8ca
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..72be99743a6a27a6155df964923308cf2d83c45c
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-310.pyc b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..79bd3d0f760972c314a801b81a77632981e2b301
Binary files /dev/null and b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-310.pyc differ
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/factory.py b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/factory.py
new file mode 100644
index 0000000000000000000000000000000000000000..6c273eb88dbc6b2de39b6f444991f3781834854b
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/factory.py
@@ -0,0 +1,823 @@
+import contextlib
+import functools
+import logging
+from typing import (
+ TYPE_CHECKING,
+ Callable,
+ Dict,
+ FrozenSet,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ NamedTuple,
+ Optional,
+ Protocol,
+ Sequence,
+ Set,
+ Tuple,
+ TypeVar,
+ cast,
+)
+
+from pip._vendor.packaging.requirements import InvalidRequirement
+from pip._vendor.packaging.specifiers import SpecifierSet
+from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
+from pip._vendor.packaging.version import InvalidVersion, Version
+from pip._vendor.resolvelib import ResolutionImpossible
+
+from pip._internal.cache import CacheEntry, WheelCache
+from pip._internal.exceptions import (
+ DistributionNotFound,
+ InstallationError,
+ InvalidInstalledPackage,
+ MetadataInconsistent,
+ MetadataInvalid,
+ UnsupportedPythonVersion,
+ UnsupportedWheel,
+)
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution, get_default_environment
+from pip._internal.models.link import Link
+from pip._internal.models.wheel import Wheel
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req.constructors import (
+ install_req_drop_extras,
+ install_req_from_link_and_ireq,
+)
+from pip._internal.req.req_install import (
+ InstallRequirement,
+ check_invalid_constraint_type,
+)
+from pip._internal.resolution.base import InstallRequirementProvider
+from pip._internal.utils.compatibility_tags import get_supported
+from pip._internal.utils.hashes import Hashes
+from pip._internal.utils.packaging import get_requirement
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+from .base import Candidate, Constraint, Requirement
+from .candidates import (
+ AlreadyInstalledCandidate,
+ BaseCandidate,
+ EditableCandidate,
+ ExtrasCandidate,
+ LinkCandidate,
+ RequiresPythonCandidate,
+ as_base_candidate,
+)
+from .found_candidates import FoundCandidates, IndexCandidateInfo
+from .requirements import (
+ ExplicitRequirement,
+ RequiresPythonRequirement,
+ SpecifierRequirement,
+ SpecifierWithoutExtrasRequirement,
+ UnsatisfiableRequirement,
+)
+
+if TYPE_CHECKING:
+
+ class ConflictCause(Protocol):
+ requirement: RequiresPythonRequirement
+ parent: Candidate
+
+
+logger = logging.getLogger(__name__)
+
+C = TypeVar("C")
+Cache = Dict[Link, C]
+
+
+class CollectedRootRequirements(NamedTuple):
+ requirements: List[Requirement]
+ constraints: Dict[str, Constraint]
+ user_requested: Dict[str, int]
+
+
+class Factory:
+ def __init__(
+ self,
+ finder: PackageFinder,
+ preparer: RequirementPreparer,
+ make_install_req: InstallRequirementProvider,
+ wheel_cache: Optional[WheelCache],
+ use_user_site: bool,
+ force_reinstall: bool,
+ ignore_installed: bool,
+ ignore_requires_python: bool,
+ py_version_info: Optional[Tuple[int, ...]] = None,
+ ) -> None:
+ self._finder = finder
+ self.preparer = preparer
+ self._wheel_cache = wheel_cache
+ self._python_candidate = RequiresPythonCandidate(py_version_info)
+ self._make_install_req_from_spec = make_install_req
+ self._use_user_site = use_user_site
+ self._force_reinstall = force_reinstall
+ self._ignore_requires_python = ignore_requires_python
+
+ self._build_failures: Cache[InstallationError] = {}
+ self._link_candidate_cache: Cache[LinkCandidate] = {}
+ self._editable_candidate_cache: Cache[EditableCandidate] = {}
+ self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {}
+ self._extras_candidate_cache: Dict[
+ Tuple[int, FrozenSet[NormalizedName]], ExtrasCandidate
+ ] = {}
+ self._supported_tags_cache = get_supported()
+
+ if not ignore_installed:
+ env = get_default_environment()
+ self._installed_dists = {
+ dist.canonical_name: dist
+ for dist in env.iter_installed_distributions(local_only=False)
+ }
+ else:
+ self._installed_dists = {}
+
+ @property
+ def force_reinstall(self) -> bool:
+ return self._force_reinstall
+
+ def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None:
+ if not link.is_wheel:
+ return
+ wheel = Wheel(link.filename)
+ if wheel.supported(self._finder.target_python.get_unsorted_tags()):
+ return
+ msg = f"{link.filename} is not a supported wheel on this platform."
+ raise UnsupportedWheel(msg)
+
+ def _make_extras_candidate(
+ self,
+ base: BaseCandidate,
+ extras: FrozenSet[str],
+ *,
+ comes_from: Optional[InstallRequirement] = None,
+ ) -> ExtrasCandidate:
+ cache_key = (id(base), frozenset(canonicalize_name(e) for e in extras))
+ try:
+ candidate = self._extras_candidate_cache[cache_key]
+ except KeyError:
+ candidate = ExtrasCandidate(base, extras, comes_from=comes_from)
+ self._extras_candidate_cache[cache_key] = candidate
+ return candidate
+
+ def _make_candidate_from_dist(
+ self,
+ dist: BaseDistribution,
+ extras: FrozenSet[str],
+ template: InstallRequirement,
+ ) -> Candidate:
+ try:
+ base = self._installed_candidate_cache[dist.canonical_name]
+ except KeyError:
+ base = AlreadyInstalledCandidate(dist, template, factory=self)
+ self._installed_candidate_cache[dist.canonical_name] = base
+ if not extras:
+ return base
+ return self._make_extras_candidate(base, extras, comes_from=template)
+
+ def _make_candidate_from_link(
+ self,
+ link: Link,
+ extras: FrozenSet[str],
+ template: InstallRequirement,
+ name: Optional[NormalizedName],
+ version: Optional[Version],
+ ) -> Optional[Candidate]:
+ base: Optional[BaseCandidate] = self._make_base_candidate_from_link(
+ link, template, name, version
+ )
+ if not extras or base is None:
+ return base
+ return self._make_extras_candidate(base, extras, comes_from=template)
+
+ def _make_base_candidate_from_link(
+ self,
+ link: Link,
+ template: InstallRequirement,
+ name: Optional[NormalizedName],
+ version: Optional[Version],
+ ) -> Optional[BaseCandidate]:
+ # TODO: Check already installed candidate, and use it if the link and
+ # editable flag match.
+
+ if link in self._build_failures:
+ # We already tried this candidate before, and it does not build.
+ # Don't bother trying again.
+ return None
+
+ if template.editable:
+ if link not in self._editable_candidate_cache:
+ try:
+ self._editable_candidate_cache[link] = EditableCandidate(
+ link,
+ template,
+ factory=self,
+ name=name,
+ version=version,
+ )
+ except (MetadataInconsistent, MetadataInvalid) as e:
+ logger.info(
+ "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
+ link,
+ e,
+ extra={"markup": True},
+ )
+ self._build_failures[link] = e
+ return None
+
+ return self._editable_candidate_cache[link]
+ else:
+ if link not in self._link_candidate_cache:
+ try:
+ self._link_candidate_cache[link] = LinkCandidate(
+ link,
+ template,
+ factory=self,
+ name=name,
+ version=version,
+ )
+ except MetadataInconsistent as e:
+ logger.info(
+ "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
+ link,
+ e,
+ extra={"markup": True},
+ )
+ self._build_failures[link] = e
+ return None
+ return self._link_candidate_cache[link]
+
+ def _iter_found_candidates(
+ self,
+ ireqs: Sequence[InstallRequirement],
+ specifier: SpecifierSet,
+ hashes: Hashes,
+ prefers_installed: bool,
+ incompatible_ids: Set[int],
+ ) -> Iterable[Candidate]:
+ if not ireqs:
+ return ()
+
+ # The InstallRequirement implementation requires us to give it a
+ # "template". Here we just choose the first requirement to represent
+ # all of them.
+ # Hopefully the Project model can correct this mismatch in the future.
+ template = ireqs[0]
+ assert template.req, "Candidates found on index must be PEP 508"
+ name = canonicalize_name(template.req.name)
+
+ extras: FrozenSet[str] = frozenset()
+ for ireq in ireqs:
+ assert ireq.req, "Candidates found on index must be PEP 508"
+ specifier &= ireq.req.specifier
+ hashes &= ireq.hashes(trust_internet=False)
+ extras |= frozenset(ireq.extras)
+
+ def _get_installed_candidate() -> Optional[Candidate]:
+ """Get the candidate for the currently-installed version."""
+ # If --force-reinstall is set, we want the version from the index
+ # instead, so we "pretend" there is nothing installed.
+ if self._force_reinstall:
+ return None
+ try:
+ installed_dist = self._installed_dists[name]
+ except KeyError:
+ return None
+
+ try:
+ # Don't use the installed distribution if its version
+ # does not fit the current dependency graph.
+ if not specifier.contains(installed_dist.version, prereleases=True):
+ return None
+ except InvalidVersion as e:
+ raise InvalidInstalledPackage(dist=installed_dist, invalid_exc=e)
+
+ candidate = self._make_candidate_from_dist(
+ dist=installed_dist,
+ extras=extras,
+ template=template,
+ )
+ # The candidate is a known incompatibility. Don't use it.
+ if id(candidate) in incompatible_ids:
+ return None
+ return candidate
+
+ def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]:
+ result = self._finder.find_best_candidate(
+ project_name=name,
+ specifier=specifier,
+ hashes=hashes,
+ )
+ icans = result.applicable_candidates
+
+ # PEP 592: Yanked releases are ignored unless the specifier
+ # explicitly pins a version (via '==' or '===') that can be
+ # solely satisfied by a yanked release.
+ all_yanked = all(ican.link.is_yanked for ican in icans)
+
+ def is_pinned(specifier: SpecifierSet) -> bool:
+ for sp in specifier:
+ if sp.operator == "===":
+ return True
+ if sp.operator != "==":
+ continue
+ if sp.version.endswith(".*"):
+ continue
+ return True
+ return False
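+ # Illustrative doctest (not from pip's test suite): only "===" or a
+ # "==" without a trailing ".*" counts as a pin.
+ #
+ # >>> from pip._vendor.packaging.specifiers import SpecifierSet
+ # >>> is_pinned(SpecifierSet("==1.2.3"))
+ # True
+ # >>> is_pinned(SpecifierSet("==1.2.*"))
+ # False
+ # >>> is_pinned(SpecifierSet(">=1.2,<2.0"))
+ # False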
+
+ pinned = is_pinned(specifier)
+
+ # PackageFinder returns earlier versions first, so we reverse.
+ for ican in reversed(icans):
+ if not (all_yanked and pinned) and ican.link.is_yanked:
+ continue
+ func = functools.partial(
+ self._make_candidate_from_link,
+ link=ican.link,
+ extras=extras,
+ template=template,
+ name=name,
+ version=ican.version,
+ )
+ yield ican.version, func
+
+ return FoundCandidates(
+ iter_index_candidate_infos,
+ _get_installed_candidate(),
+ prefers_installed,
+ incompatible_ids,
+ )
+
+ def _iter_explicit_candidates_from_base(
+ self,
+ base_requirements: Iterable[Requirement],
+ extras: FrozenSet[str],
+ ) -> Iterator[Candidate]:
+ """Produce explicit candidates from the base given an extra-ed package.
+
+ :param base_requirements: Requirements known to the resolver. The
+ requirements are guaranteed to not have extras.
+ :param extras: The extras to inject into the explicit requirements'
+ candidates.
+ """
+ for req in base_requirements:
+ lookup_cand, _ = req.get_candidate_lookup()
+ if lookup_cand is None: # Not explicit.
+ continue
+ # We've stripped extras from the identifier, and should always
+ # get a BaseCandidate here, unless there's a bug elsewhere.
+ base_cand = as_base_candidate(lookup_cand)
+ assert base_cand is not None, "no extras here"
+ yield self._make_extras_candidate(base_cand, extras)
+
+ def _iter_candidates_from_constraints(
+ self,
+ identifier: str,
+ constraint: Constraint,
+ template: InstallRequirement,
+ ) -> Iterator[Candidate]:
+ """Produce explicit candidates from constraints.
+
+ This creates "fake" InstallRequirement objects that are basically clones
+ of what "should" be the template, but with original_link set to link.
+ """
+ for link in constraint.links:
+ self._fail_if_link_is_unsupported_wheel(link)
+ candidate = self._make_base_candidate_from_link(
+ link,
+ template=install_req_from_link_and_ireq(link, template),
+ name=canonicalize_name(identifier),
+ version=None,
+ )
+ if candidate:
+ yield candidate
+
+ def find_candidates(
+ self,
+ identifier: str,
+ requirements: Mapping[str, Iterable[Requirement]],
+ incompatibilities: Mapping[str, Iterator[Candidate]],
+ constraint: Constraint,
+ prefers_installed: bool,
+ is_satisfied_by: Callable[[Requirement, Candidate], bool],
+ ) -> Iterable[Candidate]:
+ # Collect basic lookup information from the requirements.
+ explicit_candidates: Set[Candidate] = set()
+ ireqs: List[InstallRequirement] = []
+ for req in requirements[identifier]:
+ cand, ireq = req.get_candidate_lookup()
+ if cand is not None:
+ explicit_candidates.add(cand)
+ if ireq is not None:
+ ireqs.append(ireq)
+
+ # If the current identifier contains extras, add requires and explicit
+ # candidates from entries from extra-less identifier.
+ with contextlib.suppress(InvalidRequirement):
+ parsed_requirement = get_requirement(identifier)
+ if parsed_requirement.name != identifier:
+ explicit_candidates.update(
+ self._iter_explicit_candidates_from_base(
+ requirements.get(parsed_requirement.name, ()),
+ frozenset(parsed_requirement.extras),
+ ),
+ )
+ for req in requirements.get(parsed_requirement.name, []):
+ _, ireq = req.get_candidate_lookup()
+ if ireq is not None:
+ ireqs.append(ireq)
+
+ # Add explicit candidates from constraints. We only do this if there are
+ # known ireqs, which represent requirements not already explicit. If
+ # there are no ireqs, we're constraining already-explicit requirements,
+ # which is handled later when we return the explicit candidates.
+ if ireqs:
+ try:
+ explicit_candidates.update(
+ self._iter_candidates_from_constraints(
+ identifier,
+ constraint,
+ template=ireqs[0],
+ ),
+ )
+ except UnsupportedWheel:
+ # If we're constrained to install a wheel incompatible with the
+ # target architecture, no candidates will ever be valid.
+ return ()
+
+ # Since we cache all the candidates, incompatibility identification
+ # can be made quicker by comparing only the id() values.
+ incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())}
+
+ # If none of the requirements want an explicit candidate, we can ask
+ # the finder for candidates.
+ if not explicit_candidates:
+ return self._iter_found_candidates(
+ ireqs,
+ constraint.specifier,
+ constraint.hashes,
+ prefers_installed,
+ incompat_ids,
+ )
+
+ return (
+ c
+ for c in explicit_candidates
+ if id(c) not in incompat_ids
+ and constraint.is_satisfied_by(c)
+ and all(is_satisfied_by(req, c) for req in requirements[identifier])
+ )
+
+ def _make_requirements_from_install_req(
+ self, ireq: InstallRequirement, requested_extras: Iterable[str]
+ ) -> Iterator[Requirement]:
+ """
+ Returns requirement objects associated with the given InstallRequirement. In
+ most cases this will be a single object but the following special cases exist:
+ - the InstallRequirement has markers that do not apply -> result is empty
+ - the InstallRequirement has both a constraint (or link) and extras
+ -> result is split in two requirement objects: one with the constraint
+ (or link) and one with the extra. This allows centralized constraint
+ handling for the base, resulting in fewer candidate rejections.
+ """
+ if not ireq.match_markers(requested_extras):
+ logger.info(
+ "Ignoring %s: markers '%s' don't match your environment",
+ ireq.name,
+ ireq.markers,
+ )
+ elif not ireq.link:
+ if ireq.extras and ireq.req is not None and ireq.req.specifier:
+ yield SpecifierWithoutExtrasRequirement(ireq)
+ yield SpecifierRequirement(ireq)
+ else:
+ self._fail_if_link_is_unsupported_wheel(ireq.link)
+ # Always make the link candidate for the base requirement to make it
+ # available to `find_candidates` for explicit candidate lookup for any
+ # set of extras.
+ # The extras are required separately via a second requirement.
+ cand = self._make_base_candidate_from_link(
+ ireq.link,
+ template=install_req_drop_extras(ireq) if ireq.extras else ireq,
+ name=canonicalize_name(ireq.name) if ireq.name else None,
+ version=None,
+ )
+ if cand is None:
+ # There's no way we can satisfy a URL requirement if the underlying
+ # candidate fails to build. An unnamed URL must be user-supplied, so
+ # we fail eagerly. If the URL is named, an unsatisfiable requirement
+ # can make the resolver do the right thing, either backtrack (and
+ # maybe find some other requirement that's buildable) or raise a
+ # ResolutionImpossible eventually.
+ if not ireq.name:
+ raise self._build_failures[ireq.link]
+ yield UnsatisfiableRequirement(canonicalize_name(ireq.name))
+ else:
+ # require the base from the link
+ yield self.make_requirement_from_candidate(cand)
+ if ireq.extras:
+ # require the extras on top of the base candidate
+ yield self.make_requirement_from_candidate(
+ self._make_extras_candidate(cand, frozenset(ireq.extras))
+ )
+
+ def collect_root_requirements(
+ self, root_ireqs: List[InstallRequirement]
+ ) -> CollectedRootRequirements:
+ collected = CollectedRootRequirements([], {}, {})
+ for i, ireq in enumerate(root_ireqs):
+ if ireq.constraint:
+ # Ensure we only accept valid constraints
+ problem = check_invalid_constraint_type(ireq)
+ if problem:
+ raise InstallationError(problem)
+ if not ireq.match_markers():
+ continue
+ assert ireq.name, "Constraint must be named"
+ name = canonicalize_name(ireq.name)
+ if name in collected.constraints:
+ collected.constraints[name] &= ireq
+ else:
+ collected.constraints[name] = Constraint.from_ireq(ireq)
+ else:
+ reqs = list(
+ self._make_requirements_from_install_req(
+ ireq,
+ requested_extras=(),
+ )
+ )
+ if not reqs:
+ continue
+ template = reqs[0]
+ if ireq.user_supplied and template.name not in collected.user_requested:
+ collected.user_requested[template.name] = i
+ collected.requirements.extend(reqs)
+ # Put requirements with extras at the end of the root requires. This does not
+ # affect resolvelib's picking preference but it does affect its initial criteria
+ # population: by putting extras at the end we enable the candidate finder to
+ # present resolvelib with a smaller set of candidates, already
+ # taking into account any non-transient constraints on the associated base. This
+ # means resolvelib will have fewer candidates to visit and reject.
+ # Python's list sort is stable, meaning relative order is kept for objects with
+ # the same key.
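+ # An analogous toy example (illustrative) of sorting on a binary key:
+ #
+ # >>> sorted(["b[x]", "a", "b"], key=lambda n: "[" in n)
+ # ['a', 'b', 'b[x]']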
+ collected.requirements.sort(key=lambda r: r.name != r.project_name)
+ return collected
+
+ def make_requirement_from_candidate(
+ self, candidate: Candidate
+ ) -> ExplicitRequirement:
+ return ExplicitRequirement(candidate)
+
+ def make_requirements_from_spec(
+ self,
+ specifier: str,
+ comes_from: Optional[InstallRequirement],
+ requested_extras: Iterable[str] = (),
+ ) -> Iterator[Requirement]:
+ """
+ Returns requirement objects associated with the given specifier. In most cases
+ this will be a single object but the following special cases exist:
+ - the specifier has markers that do not apply -> result is empty
+ - the specifier has both a constraint and extras -> result is split
+ in two requirement objects: one with the constraint and one with the
+ extra. This allows centralized constraint handling for the base,
+ resulting in fewer candidate rejections.
+ """
+ ireq = self._make_install_req_from_spec(specifier, comes_from)
+ return self._make_requirements_from_install_req(ireq, requested_extras)
+
+ def make_requires_python_requirement(
+ self,
+ specifier: SpecifierSet,
+ ) -> Optional[Requirement]:
+ if self._ignore_requires_python:
+ return None
+ # Don't bother creating a dependency for an empty Requires-Python.
+ if not str(specifier):
+ return None
+ return RequiresPythonRequirement(specifier, self._python_candidate)
+
+ def get_wheel_cache_entry(
+ self, link: Link, name: Optional[str]
+ ) -> Optional[CacheEntry]:
+ """Look up the link in the wheel cache.
+
+ If ``preparer.require_hashes`` is True, don't use the wheel cache,
+ because cached wheels, always built locally, have different hashes
+ than the files downloaded from the index server and thus throw false
+ hash mismatches. Furthermore, cached wheels at present have
+ nondeterministic contents due to file modification times.
+ """
+ if self._wheel_cache is None:
+ return None
+ return self._wheel_cache.get_cache_entry(
+ link=link,
+ package_name=name,
+ supported_tags=self._supported_tags_cache,
+ )
+
+ def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[BaseDistribution]:
+ # TODO: Are there more cases this needs to return True? Editable?
+ dist = self._installed_dists.get(candidate.project_name)
+ if dist is None: # Not installed, no uninstallation required.
+ return None
+
+ # We're installing into the global site. The current installation must
+ # be uninstalled, no matter whether it's in the global or user site,
+ # because the user site installation takes precedence over the global one.
+ if not self._use_user_site:
+ return dist
+
+ # We're installing into user site. Remove the user site installation.
+ if dist.in_usersite:
+ return dist
+
+ # We're installing into the user site, but the installed incompatible
+ # package is in the global site. We can't uninstall that, and the new
+ # user installation would "shadow" it. But shadowing won't work in
+ # virtual environments, so we error out.
+ if running_under_virtualenv() and dist.in_site_packages:
+ message = (
+ f"Will not install to the user site because it will lack "
+ f"sys.path precedence to {dist.raw_name} in {dist.location}"
+ )
+ raise InstallationError(message)
+ return None
+
+ def _report_requires_python_error(
+ self, causes: Sequence["ConflictCause"]
+ ) -> UnsupportedPythonVersion:
+ assert causes, "Requires-Python error reported with no cause"
+
+ version = self._python_candidate.version
+
+ if len(causes) == 1:
+ specifier = str(causes[0].requirement.specifier)
+ message = (
+ f"Package {causes[0].parent.name!r} requires a different "
+ f"Python: {version} not in {specifier!r}"
+ )
+ return UnsupportedPythonVersion(message)
+
+ message = f"Packages require a different Python. {version} not in:"
+ for cause in causes:
+ package = cause.parent.format_for_error()
+ specifier = str(cause.requirement.specifier)
+ message += f"\n{specifier!r} (required by {package})"
+ return UnsupportedPythonVersion(message)
+
+ def _report_single_requirement_conflict(
+ self, req: Requirement, parent: Optional[Candidate]
+ ) -> DistributionNotFound:
+ if parent is None:
+ req_disp = str(req)
+ else:
+ req_disp = f"{req} (from {parent.name})"
+
+ cands = self._finder.find_all_candidates(req.project_name)
+ skipped_by_requires_python = self._finder.requires_python_skipped_reasons()
+
+ versions_set: Set[Version] = set()
+ yanked_versions_set: Set[Version] = set()
+ for c in cands:
+ is_yanked = c.link.is_yanked if c.link else False
+ if is_yanked:
+ yanked_versions_set.add(c.version)
+ else:
+ versions_set.add(c.version)
+
+ versions = [str(v) for v in sorted(versions_set)]
+ yanked_versions = [str(v) for v in sorted(yanked_versions_set)]
+
+ if yanked_versions:
+ # Saying "version X is yanked" isn't entirely accurate.
+ # https://github.com/pypa/pip/issues/11745#issuecomment-1402805842
+ logger.critical(
+ "Ignored the following yanked versions: %s",
+ ", ".join(yanked_versions) or "none",
+ )
+ if skipped_by_requires_python:
+ logger.critical(
+ "Ignored the following versions that require a different python "
+ "version: %s",
+ "; ".join(skipped_by_requires_python) or "none",
+ )
+ logger.critical(
+ "Could not find a version that satisfies the requirement %s "
+ "(from versions: %s)",
+ req_disp,
+ ", ".join(versions) or "none",
+ )
+ if str(req) == "requirements.txt":
+ logger.info(
+ "HINT: You are attempting to install a package literally "
+ 'named "requirements.txt" (which cannot exist). Consider '
+ "using the '-r' flag to install the packages listed in "
+ "requirements.txt"
+ )
+
+ return DistributionNotFound(f"No matching distribution found for {req}")
+
+ def get_installation_error(
+ self,
+ e: "ResolutionImpossible[Requirement, Candidate]",
+ constraints: Dict[str, Constraint],
+ ) -> InstallationError:
+ assert e.causes, "Installation error reported with no cause"
+
+ # If one of the things we can't solve is "we need Python X.Y",
+ # that is what we report.
+ requires_python_causes = [
+ cause
+ for cause in e.causes
+ if isinstance(cause.requirement, RequiresPythonRequirement)
+ and not cause.requirement.is_satisfied_by(self._python_candidate)
+ ]
+ if requires_python_causes:
+ # The comprehension above makes sure all Requirement instances are
+ # RequiresPythonRequirement, so let's cast for convenience.
+ return self._report_requires_python_error(
+ cast("Sequence[ConflictCause]", requires_python_causes),
+ )
+
+ # Otherwise, we have a set of causes which can't all be satisfied
+ # at once.
+
+ # The simplest case is when we have *one* cause that can't be
+ # satisfied. We just report that case.
+ if len(e.causes) == 1:
+ req, parent = e.causes[0]
+ if req.name not in constraints:
+ return self._report_single_requirement_conflict(req, parent)
+
+ # OK, we now have a list of requirements that can't all be
+ # satisfied at once.
+
+ # A couple of formatting helpers
+ def text_join(parts: List[str]) -> str:
+ if len(parts) == 1:
+ return parts[0]
+
+ return ", ".join(parts[:-1]) + " and " + parts[-1]
+
+ def describe_trigger(parent: Candidate) -> str:
+ ireq = parent.get_install_requirement()
+ if not ireq or not ireq.comes_from:
+ return f"{parent.name}=={parent.version}"
+ if isinstance(ireq.comes_from, InstallRequirement):
+ return str(ireq.comes_from.name)
+ return str(ireq.comes_from)
+
+ triggers = set()
+ for req, parent in e.causes:
+ if parent is None:
+ # This is a root requirement, so we can report it directly
+ trigger = req.format_for_error()
+ else:
+ trigger = describe_trigger(parent)
+ triggers.add(trigger)
+
+ if triggers:
+ info = text_join(sorted(triggers))
+ else:
+ info = "the requested packages"
+
+ msg = (
+ f"Cannot install {info} because these package versions "
+ "have conflicting dependencies."
+ )
+ logger.critical(msg)
+ msg = "\nThe conflict is caused by:"
+
+ relevant_constraints = set()
+ for req, parent in e.causes:
+ if req.name in constraints:
+ relevant_constraints.add(req.name)
+ msg = msg + "\n "
+ if parent:
+ msg = msg + f"{parent.name} {parent.version} depends on "
+ else:
+ msg = msg + "The user requested "
+ msg = msg + req.format_for_error()
+ for key in relevant_constraints:
+ spec = constraints[key].specifier
+ msg += f"\n The user requested (constraint) {key}{spec}"
+
+ msg = (
+ msg
+ + "\n\n"
+ + "To fix this you could try to:\n"
+ + "1. loosen the range of package versions you've specified\n"
+ + "2. remove package versions to allow pip to attempt to solve "
+ + "the dependency conflict\n"
+ )
+
+ logger.info(msg)
+
+ return DistributionNotFound(
+ "ResolutionImpossible: for help visit "
+ "https://pip.pypa.io/en/latest/topics/dependency-resolution/"
+ "#dealing-with-dependency-conflicts"
+ )
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
new file mode 100644
index 0000000000000000000000000000000000000000..a1d57e0f4b275cd5783c950381ddfff0184da138
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
@@ -0,0 +1,174 @@
+"""Utilities to lazily create and visit candidates found.
+
+Creating and visiting a candidate is a *very* costly operation. It involves
+fetching, extracting, potentially building modules from source, and verifying
+distribution metadata. It is therefore crucial for performance to keep
+everything here lazy all the way down, so we only touch candidates that we
+absolutely need, and not "download the world" when we only need one version of
+something.
+"""
+
+import functools
+import logging
+from collections.abc import Sequence
+from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple
+
+from pip._vendor.packaging.version import _BaseVersion
+
+from pip._internal.exceptions import MetadataInvalid
+
+from .base import Candidate
+
+logger = logging.getLogger(__name__)
+
+IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]]
+
+if TYPE_CHECKING:
+ SequenceCandidate = Sequence[Candidate]
+else:
+ # For compatibility: Python before 3.9 does not support using [] on the
+ # Sequence class.
+ #
+ # >>> from collections.abc import Sequence
+ # >>> Sequence[str]
+ # Traceback (most recent call last):
+ # File "", line 1, in
+ # TypeError: 'ABCMeta' object is not subscriptable
+ #
+ # TODO: Remove this block after dropping Python 3.8 support.
+ SequenceCandidate = Sequence
+
+
+def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]:
+ """Iterator for ``FoundCandidates``.
+
+ This iterator is used when the package is not already installed. Candidates
+ from index come later in their normal ordering.
+ """
+ versions_found: Set[_BaseVersion] = set()
+ for version, func in infos:
+ if version in versions_found:
+ continue
+ try:
+ candidate = func()
+ except MetadataInvalid as e:
+ logger.warning(
+ "Ignoring version %s of %s since it has invalid metadata:\n"
+ "%s\n"
+ "Please use pip<24.1 if you need to use this version.",
+ version,
+ e.ireq.name,
+ e,
+ )
+ # Mark version as found to avoid trying other candidates with the same
+ # version, since they most likely have invalid metadata as well.
+ versions_found.add(version)
+ else:
+ if candidate is None:
+ continue
+ yield candidate
+ versions_found.add(version)
+
+
+def _iter_built_with_prepended(
+ installed: Candidate, infos: Iterator[IndexCandidateInfo]
+) -> Iterator[Candidate]:
+ """Iterator for ``FoundCandidates``.
+
+ This iterator is used when the resolver prefers the already-installed
+ candidate and NOT to upgrade. The installed candidate is therefore
+ always yielded first, and candidates from index come later in their
+ normal ordering, except skipped when the version is already installed.
+ """
+ yield installed
+ versions_found: Set[_BaseVersion] = {installed.version}
+ for version, func in infos:
+ if version in versions_found:
+ continue
+ candidate = func()
+ if candidate is None:
+ continue
+ yield candidate
+ versions_found.add(version)
+
+
+def _iter_built_with_inserted(
+ installed: Candidate, infos: Iterator[IndexCandidateInfo]
+) -> Iterator[Candidate]:
+ """Iterator for ``FoundCandidates``.
+
+ This iterator is used when the resolver prefers to upgrade an
+ already-installed package. Candidates from index are returned in their
+ normal ordering, except replaced when the version is already installed.
+
+ The implementation iterates through and yields other candidates, inserting
+ the installed candidate exactly once before we start yielding older or
+ equivalent candidates, or after all other candidates if they are all newer.
+ """
+ versions_found: Set[_BaseVersion] = set()
+ for version, func in infos:
+ if version in versions_found:
+ continue
+ # If the installed candidate is better, yield it first.
+ if installed.version >= version:
+ yield installed
+ versions_found.add(installed.version)
+ candidate = func()
+ if candidate is None:
+ continue
+ yield candidate
+ versions_found.add(version)
+
+ # If the installed candidate is older than all other candidates.
+ if installed.version not in versions_found:
+ yield installed
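+ # Worked example (illustrative): with the installed candidate at 2.0 and
+ # index versions arriving newest-first as [3.0, 1.0], this yields the 3.0
+ # candidate, then the installed 2.0, then the 1.0 candidate. If the index
+ # only had [3.0], the installed 2.0 would be yielded after the loop.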
+
+
+class FoundCandidates(SequenceCandidate):
+ """A lazy sequence to provide candidates to the resolver.
+
+ The intended usage is to return this from `find_matches()` so the resolver
+ can iterate through the sequence multiple times, but only access the index
+ page when remote packages are actually needed. This improves performance
+ when suitable candidates are already installed on disk.
+ """
+
+ def __init__(
+ self,
+ get_infos: Callable[[], Iterator[IndexCandidateInfo]],
+ installed: Optional[Candidate],
+ prefers_installed: bool,
+ incompatible_ids: Set[int],
+ ):
+ self._get_infos = get_infos
+ self._installed = installed
+ self._prefers_installed = prefers_installed
+ self._incompatible_ids = incompatible_ids
+
+ def __getitem__(self, index: Any) -> Any:
+ # Implemented to satisfy the ABC check. This is not needed by the
+ # resolver, and should not be used by the provider either (for
+ # performance reasons).
+ raise NotImplementedError("don't do this")
+
+ def __iter__(self) -> Iterator[Candidate]:
+ infos = self._get_infos()
+ if not self._installed:
+ iterator = _iter_built(infos)
+ elif self._prefers_installed:
+ iterator = _iter_built_with_prepended(self._installed, infos)
+ else:
+ iterator = _iter_built_with_inserted(self._installed, infos)
+ return (c for c in iterator if id(c) not in self._incompatible_ids)
+
+ def __len__(self) -> int:
+ # Implemented to satisfy the ABC check. This is not needed by the
+ # resolver, and should not be used by the provider either (for
+ # performance reasons).
+ raise NotImplementedError("don't do this")
+
+ @functools.lru_cache(maxsize=1)
+ def __bool__(self) -> bool:
+ if self._prefers_installed and self._installed:
+ return True
+ return any(self)
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/provider.py b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/provider.py
new file mode 100644
index 0000000000000000000000000000000000000000..fb0dd85f1124bed20da9402ada714af3f3584e07
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/provider.py
@@ -0,0 +1,258 @@
+import collections
+import math
+from functools import lru_cache
+from typing import (
+ TYPE_CHECKING,
+ Dict,
+ Iterable,
+ Iterator,
+ Mapping,
+ Sequence,
+ TypeVar,
+ Union,
+)
+
+from pip._vendor.resolvelib.providers import AbstractProvider
+
+from .base import Candidate, Constraint, Requirement
+from .candidates import REQUIRES_PYTHON_IDENTIFIER
+from .factory import Factory
+
+if TYPE_CHECKING:
+ from pip._vendor.resolvelib.providers import Preference
+ from pip._vendor.resolvelib.resolvers import RequirementInformation
+
+ PreferenceInformation = RequirementInformation[Requirement, Candidate]
+
+ _ProviderBase = AbstractProvider[Requirement, Candidate, str]
+else:
+ _ProviderBase = AbstractProvider
+
+# Notes on the relationship between the provider, the factory, and the
+# candidate and requirement classes.
+#
+# The provider is a direct implementation of the resolvelib class. Its role
+# is to deliver the API that resolvelib expects.
+#
+# Rather than work with completely abstract "requirement" and "candidate"
+# concepts as resolvelib does, pip has concrete classes implementing these two
+# ideas. The API of Requirement and Candidate objects are defined in the base
+# classes, but essentially map fairly directly to the equivalent provider
+# methods. In particular, `find_matches` and `is_satisfied_by` are
+# requirement methods, and `get_dependencies` is a candidate method.
+#
+# The factory is the interface to pip's internal mechanisms. It is stateless,
+# and is created by the resolver and held as a property of the provider. It is
+# responsible for creating Requirement and Candidate objects, and provides
+# services to those objects (access to pip's finder and preparer).
+
+
+D = TypeVar("D")
+V = TypeVar("V")
+
+
+def _get_with_identifier(
+ mapping: Mapping[str, V],
+ identifier: str,
+ default: D,
+) -> Union[D, V]:
+ """Get item from a package name lookup mapping with a resolver identifier.
+
+ This extra logic is needed when the target mapping is keyed by package
+ name, which cannot be directly looked up with an identifier (which may
+ contain requested extras). Additional logic is added to also look up a value
+ by "cleaning up" the extras from the identifier.
+ """
+ if identifier in mapping:
+ return mapping[identifier]
+ # HACK: Theoretically we should check whether this identifier is a valid
+ # "NAME[EXTRAS]" format, and parse out the name part with packaging or
+ # some regular expression. But since pip's resolver only spits out three
+ # kinds of identifiers: normalized PEP 503 names, normalized names plus
+ # extras, and Requires-Python, we can cheat a bit here.
+ name, open_bracket, _ = identifier.partition("[")
+ if open_bracket and name in mapping:
+ return mapping[name]
+ return default
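+ # Illustrative doctest (not from pip's test suite):
+ #
+ # >>> _get_with_identifier({"idna": 1}, "idna[all]", default=None)
+ # 1
+ # >>> _get_with_identifier({"idna": 1}, "chardet", default=None) is None
+ # True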
+
+
+class PipProvider(_ProviderBase):
+ """Pip's provider implementation for resolvelib.
+
+ :param constraints: A mapping of constraints specified by the user. Keys
+ are canonicalized project names.
+ :param ignore_dependencies: Whether the user specified ``--no-deps``.
+ :param upgrade_strategy: The user-specified upgrade strategy.
+ :param user_requested: A mapping of canonicalized package names that the
+ user supplied for pip to install/upgrade, to their position in the
+ command line.
+ """
+
+ def __init__(
+ self,
+ factory: Factory,
+ constraints: Dict[str, Constraint],
+ ignore_dependencies: bool,
+ upgrade_strategy: str,
+ user_requested: Dict[str, int],
+ ) -> None:
+ self._factory = factory
+ self._constraints = constraints
+ self._ignore_dependencies = ignore_dependencies
+ self._upgrade_strategy = upgrade_strategy
+ self._user_requested = user_requested
+ self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf)
+
+ def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str:
+ return requirement_or_candidate.name
+
+ def get_preference(
+ self,
+ identifier: str,
+ resolutions: Mapping[str, Candidate],
+ candidates: Mapping[str, Iterator[Candidate]],
+ information: Mapping[str, Iterable["PreferenceInformation"]],
+ backtrack_causes: Sequence["PreferenceInformation"],
+ ) -> "Preference":
+ """Produce a sort key for given requirement based on preference.
+
+ The lower the return value is, the more preferred this group of
+ arguments is.
+
+ Currently pip considers the following in order:
+
+ * Prefer if any of the known requirements is "direct", e.g. points to an
+ explicit URL.
+ * If equal, prefer if any requirement is "pinned", i.e. contains
+ operator ``===`` or ``==``.
+ * If equal, calculate an approximate "depth" and resolve requirements
+ closer to the user-specified requirements first. If the depth cannot
+ be determined (e.g. due to no matching parents), it is considered
+ infinite.
+ * If equal, order user-specified requirements by the order they are
+ specified.
+ * If equal, prefer "non-free" requirements, i.e. those that contain at
+ least one operator, such as ``>=`` or ``<``.
+ * If equal, order alphabetically for consistency (helps debuggability).
+ """
+ try:
+ next(iter(information[identifier]))
+ except StopIteration:
+ # There is no information for this identifier, so there are no
+ # known candidates.
+ has_information = False
+ else:
+ has_information = True
+
+ if has_information:
+ lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
+ candidate, ireqs = zip(*lookups)
+ else:
+ candidate, ireqs = None, ()
+
+ operators = [
+ specifier.operator
+ for specifier_set in (ireq.specifier for ireq in ireqs if ireq)
+ for specifier in specifier_set
+ ]
+
+ direct = candidate is not None
+ pinned = any(op[:2] == "==" for op in operators)
+ unfree = bool(operators)
+
+ try:
+ requested_order: Union[int, float] = self._user_requested[identifier]
+ except KeyError:
+ requested_order = math.inf
+ if has_information:
+ parent_depths = (
+ self._known_depths[parent.name] if parent is not None else 0.0
+ for _, parent in information[identifier]
+ )
+ inferred_depth = min(d for d in parent_depths) + 1.0
+ else:
+ inferred_depth = math.inf
+ else:
+ inferred_depth = 1.0
+ self._known_depths[identifier] = inferred_depth
+
+ # Requires-Python has only one candidate and the check is basically
+ # free, so we always do it first to avoid needless work if it fails.
+ requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER
+
+ # Prefer the causes of backtracking on the assumption that the problem
+ # resolving the dependency tree is related to the failures that caused
+ # the backtracking
+ backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes)
+
+ return (
+ not requires_python,
+ not direct,
+ not pinned,
+ not backtrack_cause,
+ inferred_depth,
+ requested_order,
+ not unfree,
+ identifier,
+ )
+
+ def find_matches(
+ self,
+ identifier: str,
+ requirements: Mapping[str, Iterator[Requirement]],
+ incompatibilities: Mapping[str, Iterator[Candidate]],
+ ) -> Iterable[Candidate]:
+ def _eligible_for_upgrade(identifier: str) -> bool:
+ """Are upgrades allowed for this project?
+
+ This checks the upgrade strategy, and whether the project was one
+ that the user specified in the command line, in order to decide
+ whether we should upgrade if there's a newer version available.
+
+ (Note that we don't need access to the `--upgrade` flag, because
+ an upgrade strategy of "to-satisfy-only" means that `--upgrade`
+ was not specified).
+ """
+ if self._upgrade_strategy == "eager":
+ return True
+ elif self._upgrade_strategy == "only-if-needed":
+ user_order = _get_with_identifier(
+ self._user_requested,
+ identifier,
+ default=None,
+ )
+ return user_order is not None
+ return False
+
+ constraint = _get_with_identifier(
+ self._constraints,
+ identifier,
+ default=Constraint.empty(),
+ )
+ return self._factory.find_candidates(
+ identifier=identifier,
+ requirements=requirements,
+ constraint=constraint,
+ prefers_installed=(not _eligible_for_upgrade(identifier)),
+ incompatibilities=incompatibilities,
+ is_satisfied_by=self.is_satisfied_by,
+ )
+
+ @lru_cache(maxsize=None)
+ def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool:
+ return requirement.is_satisfied_by(candidate)
+
+ def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]:
+ with_requires = not self._ignore_dependencies
+ return [r for r in candidate.iter_dependencies(with_requires) if r is not None]
+
+ @staticmethod
+ def is_backtrack_cause(
+ identifier: str, backtrack_causes: Sequence["PreferenceInformation"]
+ ) -> bool:
+ for backtrack_cause in backtrack_causes:
+ if identifier == backtrack_cause.requirement.name:
+ return True
+ if backtrack_cause.parent and identifier == backtrack_cause.parent.name:
+ return True
+ return False
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/resolver.py
new file mode 100644
index 0000000000000000000000000000000000000000..c12beef0b2a4344a4e0daca2540bbfd0c58ce777
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/resolver.py
@@ -0,0 +1,317 @@
+import contextlib
+import functools
+import logging
+import os
+from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast
+
+from pip._vendor.packaging.utils import canonicalize_name
+from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible
+from pip._vendor.resolvelib import Resolver as RLResolver
+from pip._vendor.resolvelib.structs import DirectedGraph
+
+from pip._internal.cache import WheelCache
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.operations.prepare import RequirementPreparer
+from pip._internal.req.constructors import install_req_extend_extras
+from pip._internal.req.req_install import InstallRequirement
+from pip._internal.req.req_set import RequirementSet
+from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
+from pip._internal.resolution.resolvelib.provider import PipProvider
+from pip._internal.resolution.resolvelib.reporter import (
+ PipDebuggingReporter,
+ PipReporter,
+)
+from pip._internal.utils.packaging import get_requirement
+
+from .base import Candidate, Requirement
+from .factory import Factory
+
+if TYPE_CHECKING:
+ from pip._vendor.resolvelib.resolvers import Result as RLResult
+
+ Result = RLResult[Requirement, Candidate, str]
+
+
+logger = logging.getLogger(__name__)
+
+
+class Resolver(BaseResolver):
+ _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
+
+ def __init__(
+ self,
+ preparer: RequirementPreparer,
+ finder: PackageFinder,
+ wheel_cache: Optional[WheelCache],
+ make_install_req: InstallRequirementProvider,
+ use_user_site: bool,
+ ignore_dependencies: bool,
+ ignore_installed: bool,
+ ignore_requires_python: bool,
+ force_reinstall: bool,
+ upgrade_strategy: str,
+ py_version_info: Optional[Tuple[int, ...]] = None,
+ ):
+ super().__init__()
+ assert upgrade_strategy in self._allowed_strategies
+
+ self.factory = Factory(
+ finder=finder,
+ preparer=preparer,
+ make_install_req=make_install_req,
+ wheel_cache=wheel_cache,
+ use_user_site=use_user_site,
+ force_reinstall=force_reinstall,
+ ignore_installed=ignore_installed,
+ ignore_requires_python=ignore_requires_python,
+ py_version_info=py_version_info,
+ )
+ self.ignore_dependencies = ignore_dependencies
+ self.upgrade_strategy = upgrade_strategy
+ self._result: Optional[Result] = None
+
+ def resolve(
+ self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
+ ) -> RequirementSet:
+ collected = self.factory.collect_root_requirements(root_reqs)
+ provider = PipProvider(
+ factory=self.factory,
+ constraints=collected.constraints,
+ ignore_dependencies=self.ignore_dependencies,
+ upgrade_strategy=self.upgrade_strategy,
+ user_requested=collected.user_requested,
+ )
+ if "PIP_RESOLVER_DEBUG" in os.environ:
+ reporter: BaseReporter = PipDebuggingReporter()
+ else:
+ reporter = PipReporter()
+ resolver: RLResolver[Requirement, Candidate, str] = RLResolver(
+ provider,
+ reporter,
+ )
+
+ try:
+ limit_how_complex_resolution_can_be = 200000
+ result = self._result = resolver.resolve(
+ collected.requirements, max_rounds=limit_how_complex_resolution_can_be
+ )
+
+ except ResolutionImpossible as e:
+ error = self.factory.get_installation_error(
+ cast("ResolutionImpossible[Requirement, Candidate]", e),
+ collected.constraints,
+ )
+ raise error from e
+
+ req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
+ # process candidates with extras last to ensure their base equivalent is
+ # already in the req_set if appropriate.
+ # Python's sort is stable so using a binary key function keeps relative order
+ # within both subsets.
+ for candidate in sorted(
+ result.mapping.values(), key=lambda c: c.name != c.project_name
+ ):
+ ireq = candidate.get_install_requirement()
+ if ireq is None:
+ if candidate.name != candidate.project_name:
+ # extend existing req's extras
+ with contextlib.suppress(KeyError):
+ req = req_set.get_requirement(candidate.project_name)
+ req_set.add_named_requirement(
+ install_req_extend_extras(
+ req, get_requirement(candidate.name).extras
+ )
+ )
+ continue
+
+ # Check if there is already an installation under the same name,
+ # and set a flag for later stages to uninstall it, if needed.
+ installed_dist = self.factory.get_dist_to_uninstall(candidate)
+ if installed_dist is None:
+ # There is no existing installation -- nothing to uninstall.
+ ireq.should_reinstall = False
+ elif self.factory.force_reinstall:
+ # The --force-reinstall flag is set -- reinstall.
+ ireq.should_reinstall = True
+ elif installed_dist.version != candidate.version:
+ # The installation is different in version -- reinstall.
+ ireq.should_reinstall = True
+ elif candidate.is_editable or installed_dist.editable:
+ # The incoming distribution is editable, or different in
+ # editable-ness to installation -- reinstall.
+ ireq.should_reinstall = True
+ elif candidate.source_link and candidate.source_link.is_file:
+ # The incoming distribution is under file://
+ if candidate.source_link.is_wheel:
+ # is a local wheel -- do nothing.
+ logger.info(
+ "%s is already installed with the same version as the "
+ "provided wheel. Use --force-reinstall to force an "
+ "installation of the wheel.",
+ ireq.name,
+ )
+ continue
+
+ # is a local sdist or path -- reinstall
+ ireq.should_reinstall = True
+ else:
+ continue
+
+ link = candidate.source_link
+ if link and link.is_yanked:
+ # The yanked reason may contain non-ASCII characters.
+ msg = (
+ "The candidate selected for download or install is a "
+ "yanked version: {name!r} candidate (version {version} "
+ "at {link})\nReason for being yanked: {reason}"
+ ).format(
+ name=candidate.name,
+ version=candidate.version,
+ link=link,
+ reason=link.yanked_reason or "",
+ )
+ logger.warning(msg)
+
+ req_set.add_named_requirement(ireq)
+
+ reqs = req_set.all_requirements
+ self.factory.preparer.prepare_linked_requirements_more(reqs)
+ for req in reqs:
+ req.prepared = True
+ req.needs_more_preparation = False
+ return req_set
+
+ def get_installation_order(
+ self, req_set: RequirementSet
+ ) -> List[InstallRequirement]:
+ """Get order for installation of requirements in RequirementSet.
+
+ The returned list contains a requirement before another that depends on
+ it. This helps ensure that the environment is kept consistent as they
+ get installed one-by-one.
+
+ The current implementation creates a topological ordering of the
+ dependency graph, giving more weight to packages with less
+ or no dependencies, while breaking any cycles in the graph at
+ arbitrary points. We make no guarantees about where the cycle
+ would be broken, other than it *would* be broken.
+ """
+ assert self._result is not None, "must call resolve() first"
+
+ if not req_set.requirements:
+ # Nothing is left to install, so we do not need an order.
+ return []
+
+ graph = self._result.graph
+ weights = get_topological_weights(graph, set(req_set.requirements.keys()))
+
+ sorted_items = sorted(
+ req_set.requirements.items(),
+ key=functools.partial(_req_set_item_sorter, weights=weights),
+ reverse=True,
+ )
+ return [ireq for _, ireq in sorted_items]
+
+
+def get_topological_weights(
+ graph: "DirectedGraph[Optional[str]]", requirement_keys: Set[str]
+) -> Dict[Optional[str], int]:
+ """Assign weights to each node based on how "deep" they are.
+
+ This implementation may change at any point in the future without prior
+ notice.
+
+ We first simplify the dependency graph by pruning any leaves and giving them
+ the highest weight: a package without any dependencies should be installed
+ first. This is done again and again in the same way, giving ever less weight
+ to the newly found leaves. The loop stops when no leaves are left: all
+ remaining packages have at least one dependency left in the graph.
+
+ Then we continue with the remaining graph, taking the length of the
+ longest path from the root to each node, ignoring any paths that contain
+ a single node twice (i.e. cycles). This is done through a depth-first
+ search of the graph, while keeping track of the path to the node.
+
+ A cycle in the graph would result in a node being revisited while also
+ being on its own path. In this case, we take no action. This helps
+ ensure we don't get stuck in a cycle.
+
+ When assigning weight, the longer path (i.e. larger length) is preferred.
+
+ We are only interested in the weights of packages that are in the
+ requirement_keys.
+ """
+ path: Set[Optional[str]] = set()
+ weights: Dict[Optional[str], int] = {}
+
+ def visit(node: Optional[str]) -> None:
+ if node in path:
+ # We hit a cycle, so we'll break it here.
+ return
+
+ # Time to visit the children!
+ path.add(node)
+ for child in graph.iter_children(node):
+ visit(child)
+ path.remove(node)
+
+ if node not in requirement_keys:
+ return
+
+ last_known_parent_count = weights.get(node, 0)
+ weights[node] = max(last_known_parent_count, len(path))
+
+ # Simplify the graph, pruning leaves that have no dependencies.
+ # This is needed for large graphs (say over 200 packages) because the
+ # `visit` function is exponentially slower in that case, taking minutes.
+ # See https://github.com/pypa/pip/issues/10557
+ # We will loop until we explicitly break the loop.
+ while True:
+ leaves = set()
+ for key in graph:
+ if key is None:
+ continue
+ for _child in graph.iter_children(key):
+ # This means we have at least one child
+ break
+ else:
+ # No child.
+ leaves.add(key)
+ if not leaves:
+ # We are done simplifying.
+ break
+ # Calculate the weight for the leaves.
+ weight = len(graph) - 1
+ for leaf in leaves:
+ if leaf not in requirement_keys:
+ continue
+ weights[leaf] = weight
+ # Remove the leaves from the graph, making it simpler.
+ for leaf in leaves:
+ graph.remove(leaf)
+
+ # Visit the remaining graph.
+ # `None` is guaranteed to be the root node by resolvelib.
+ visit(None)
+
+ # Sanity check: all requirement keys should be in the weights,
+ # and no other keys should be in the weights.
+ difference = set(weights.keys()).difference(requirement_keys)
+ assert not difference, difference
+
+ return weights
+
+
+def _req_set_item_sorter(
+ item: Tuple[str, InstallRequirement],
+ weights: Dict[Optional[str], int],
+) -> Tuple[int, str]:
+ """Key function used to sort install requirements for installation.
+
+ Based on the "weight" mapping calculated in ``get_installation_order()``.
+ The canonical package name is returned as the second member as a tie-
+ breaker to ensure the result is predictable, which is useful in tests.
+ """
+ name = canonicalize_name(item[0])
+ return weights[name], name
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/utils/compatibility_tags.py b/vllm/lib/python3.10/site-packages/pip/_internal/utils/compatibility_tags.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e7b7450dcea5b3bbcfe118f2e4cbe3fc16a7b1a
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/utils/compatibility_tags.py
@@ -0,0 +1,188 @@
+"""Generate and work with PEP 425 Compatibility Tags.
+"""
+
+import re
+from typing import List, Optional, Tuple
+
+from pip._vendor.packaging.tags import (
+ PythonVersion,
+ Tag,
+ compatible_tags,
+ cpython_tags,
+ generic_tags,
+ interpreter_name,
+ interpreter_version,
+ ios_platforms,
+ mac_platforms,
+)
+
+_apple_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)")
+
+
+def version_info_to_nodot(version_info: Tuple[int, ...]) -> str:
+ # Only use up to the first two numbers.
+ return "".join(map(str, version_info[:2]))
+
+
+def _mac_platforms(arch: str) -> List[str]:
+ match = _apple_arch_pat.match(arch)
+ if match:
+ name, major, minor, actual_arch = match.groups()
+ mac_version = (int(major), int(minor))
+ arches = [
+ # Since we have always only checked that the platform starts
+ # with "macosx", for backwards-compatibility we extract the
+ # actual prefix provided by the user in case they provided
+ # something like "macosxcustom_". It may be good to remove
+ # this as undocumented or deprecate it in the future.
+ "{}_{}".format(name, arch[len("macosx_") :])
+ for arch in mac_platforms(mac_version, actual_arch)
+ ]
+ else:
+ # arch pattern didn't match (?!)
+ arches = [arch]
+ return arches
+
+
+def _ios_platforms(arch: str) -> List[str]:
+ match = _apple_arch_pat.match(arch)
+ if match:
+ name, major, minor, actual_multiarch = match.groups()
+ ios_version = (int(major), int(minor))
+ arches = [
+ # Since we have always only checked that the platform starts
+ # with "ios", for backwards-compatibility we extract the
+ # actual prefix provided by the user in case they provided
+ # something like "ioscustom_". It may be good to remove
+ # this as undocumented or deprecate it in the future.
+ "{}_{}".format(name, arch[len("ios_") :])
+ for arch in ios_platforms(ios_version, actual_multiarch)
+ ]
+ else:
+ # arch pattern didn't match (?!)
+ arches = [arch]
+ return arches
+
+
+def _custom_manylinux_platforms(arch: str) -> List[str]:
+ arches = [arch]
+ arch_prefix, arch_sep, arch_suffix = arch.partition("_")
+ if arch_prefix == "manylinux2014":
+ # manylinux1/manylinux2010 wheels run on most manylinux2014 systems
+ # with the exception of wheels depending on ncurses. PEP 599 states
+ # manylinux1/manylinux2010 wheels should be considered
+ # manylinux2014 wheels:
+ # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels
+ if arch_suffix in {"i686", "x86_64"}:
+ arches.append("manylinux2010" + arch_sep + arch_suffix)
+ arches.append("manylinux1" + arch_sep + arch_suffix)
+ elif arch_prefix == "manylinux2010":
+ # manylinux1 wheels run on most manylinux2010 systems with the
+ # exception of wheels depending on ncurses. PEP 571 states
+ # manylinux1 wheels should be considered manylinux2010 wheels:
+ # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
+ arches.append("manylinux1" + arch_sep + arch_suffix)
+ return arches
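+ # Doctest-style examples following directly from the rules above:
+ #
+ # >>> _custom_manylinux_platforms("manylinux2014_x86_64")
+ # ['manylinux2014_x86_64', 'manylinux2010_x86_64', 'manylinux1_x86_64']
+ # >>> _custom_manylinux_platforms("manylinux2014_aarch64")
+ # ['manylinux2014_aarch64']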
+
+
+def _get_custom_platforms(arch: str) -> List[str]:
+ arch_prefix, arch_sep, arch_suffix = arch.partition("_")
+ if arch.startswith("macosx"):
+ arches = _mac_platforms(arch)
+ elif arch.startswith("ios"):
+ arches = _ios_platforms(arch)
+ elif arch_prefix in ["manylinux2014", "manylinux2010"]:
+ arches = _custom_manylinux_platforms(arch)
+ else:
+ arches = [arch]
+ return arches
+
+
+def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[str]]:
+ if not platforms:
+ return None
+
+ seen = set()
+ result = []
+
+ for p in platforms:
+ if p in seen:
+ continue
+ additions = [c for c in _get_custom_platforms(p) if c not in seen]
+ seen.update(additions)
+ result.extend(additions)
+
+ return result
+
+
+def _get_python_version(version: str) -> PythonVersion:
+ if len(version) > 1:
+ return int(version[0]), int(version[1:])
+ else:
+ return (int(version[0]),)
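+ # Examples (illustrative): "310" -> (3, 10), "39" -> (3, 9), "3" -> (3,).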
+
+
+def _get_custom_interpreter(
+ implementation: Optional[str] = None, version: Optional[str] = None
+) -> str:
+ if implementation is None:
+ implementation = interpreter_name()
+ if version is None:
+ version = interpreter_version()
+ return f"{implementation}{version}"
+
+
+def get_supported(
+ version: Optional[str] = None,
+ platforms: Optional[List[str]] = None,
+ impl: Optional[str] = None,
+ abis: Optional[List[str]] = None,
+) -> List[Tag]:
+ """Return a list of supported tags for each version specified in
+ `versions`.
+
+ :param version: a string version, of the form "33" or "32",
+ or None. The version will be assumed to support our ABI.
+ :param platform: specify a list of platforms you want valid
+ tags for, or None. If None, use the local system platform.
+ :param impl: specify the exact implementation you want valid
+ tags for, or None. If None, use the local interpreter impl.
+ :param abis: specify a list of abis you want valid
+ tags for, or None. If None, use the local interpreter abi.
+ """
+ supported: List[Tag] = []
+
+ python_version: Optional[PythonVersion] = None
+ if version is not None:
+ python_version = _get_python_version(version)
+
+ interpreter = _get_custom_interpreter(impl, version)
+
+ platforms = _expand_allowed_platforms(platforms)
+
+ is_cpython = (impl or interpreter_name()) == "cp"
+ if is_cpython:
+ supported.extend(
+ cpython_tags(
+ python_version=python_version,
+ abis=abis,
+ platforms=platforms,
+ )
+ )
+ else:
+ supported.extend(
+ generic_tags(
+ interpreter=interpreter,
+ abis=abis,
+ platforms=platforms,
+ )
+ )
+ supported.extend(
+ compatible_tags(
+ python_version=python_version,
+ interpreter=interpreter,
+ platforms=platforms,
+ )
+ )
+
+ return supported
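+ # Minimal usage sketch (illustrative; the full list is long and depends
+ # on the vendored packaging version):
+ #
+ # >>> tags = get_supported(version="310", impl="cp", abis=["cp310"],
+ # ...                      platforms=["manylinux2014_x86_64"])
+ # >>> str(tags[0])
+ # 'cp310-cp310-manylinux2014_x86_64'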
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/utils/datetime.py b/vllm/lib/python3.10/site-packages/pip/_internal/utils/datetime.py
new file mode 100644
index 0000000000000000000000000000000000000000..8668b3b0ec1deec2aeb7ff6bd94265d6705e05bf
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/utils/datetime.py
@@ -0,0 +1,11 @@
+"""For when pip wants to check the date or time.
+"""
+
+import datetime
+
+
+def today_is_later_than(year: int, month: int, day: int) -> bool:
+ today = datetime.date.today()
+ given = datetime.date(year, month, day)
+
+ return today > given
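+ # e.g. today_is_later_than(2020, 1, 1) is True on any date after
+ # 2020-01-01, while today_is_later_than(9999, 1, 1) is False.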
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/utils/deprecation.py b/vllm/lib/python3.10/site-packages/pip/_internal/utils/deprecation.py
new file mode 100644
index 0000000000000000000000000000000000000000..0911147e784737f58f174dce98ecae32b615c7b7
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/utils/deprecation.py
@@ -0,0 +1,124 @@
+"""
+A module that implements tooling to enable easy warnings about deprecations.
+"""
+
+import logging
+import warnings
+from typing import Any, Optional, TextIO, Type, Union
+
+from pip._vendor.packaging.version import parse
+
+from pip import __version__ as current_version # NOTE: tests patch this name.
+
+DEPRECATION_MSG_PREFIX = "DEPRECATION: "
+
+
+class PipDeprecationWarning(Warning):
+ pass
+
+
+_original_showwarning: Any = None
+
+
+# Warnings <-> Logging Integration
+def _showwarning(
+ message: Union[Warning, str],
+ category: Type[Warning],
+ filename: str,
+ lineno: int,
+ file: Optional[TextIO] = None,
+ line: Optional[str] = None,
+) -> None:
+ if file is not None:
+ if _original_showwarning is not None:
+ _original_showwarning(message, category, filename, lineno, file, line)
+ elif issubclass(category, PipDeprecationWarning):
+ # We use a specially named logger which will handle all of the
+ # deprecation messages for pip.
+ logger = logging.getLogger("pip._internal.deprecations")
+ logger.warning(message)
+ else:
+ _original_showwarning(message, category, filename, lineno, file, line)
+
+
+def install_warning_logger() -> None:
+ # Enable our Deprecation Warnings
+ warnings.simplefilter("default", PipDeprecationWarning, append=True)
+
+ global _original_showwarning
+
+ if _original_showwarning is None:
+ _original_showwarning = warnings.showwarning
+ warnings.showwarning = _showwarning
+
+
+def deprecated(
+ *,
+ reason: str,
+ replacement: Optional[str],
+ gone_in: Optional[str],
+ feature_flag: Optional[str] = None,
+ issue: Optional[int] = None,
+) -> None:
+ """Helper to deprecate existing functionality.
+
+ reason:
+ Textual reason shown to the user about why this functionality has
+ been deprecated. Should be a complete sentence.
+ replacement:
+ Textual suggestion shown to the user about what alternative
+ functionality they can use.
+ gone_in:
+ The version of pip in which this functionality should be removed.
+ Raises an error if pip's current version is greater than or equal to
+ this.
+ feature_flag:
+ Command-line flag of the form --use-feature={feature_flag} for testing
+ upcoming functionality.
+ issue:
+ Issue number on the tracker that would serve as a useful place for
+ users to find related discussion and provide feedback.
+ """
+
+ # Determine whether or not the feature is already gone in this version.
+ is_gone = gone_in is not None and parse(current_version) >= parse(gone_in)
+
+ message_parts = [
+ (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"),
+ (
+ gone_in,
+ (
+ "pip {} will enforce this behaviour change."
+ if not is_gone
+ else "Since pip {}, this is no longer supported."
+ ),
+ ),
+ (
+ replacement,
+ "A possible replacement is {}.",
+ ),
+ (
+ feature_flag,
+ (
+ "You can use the flag --use-feature={} to test the upcoming behaviour."
+ if not is_gone
+ else None
+ ),
+ ),
+ (
+ issue,
+ "Discussion can be found at https://github.com/pypa/pip/issues/{}",
+ ),
+ ]
+
+ message = " ".join(
+ format_str.format(value)
+ for value, format_str in message_parts
+ if format_str is not None and value is not None
+ )
+
+ # Raise as an error if this behaviour is deprecated.
+ if is_gone:
+ raise PipDeprecationWarning(message)
+
+ warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
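+
+
+ # Illustrative call (values hypothetical), showing how the message parts
+ # compose on a pip version earlier than gone_in:
+ #
+ # deprecated(
+ #     reason="Option X is deprecated.",
+ #     replacement="option Y",
+ #     gone_in="99.0",
+ #     issue=12345,
+ # )
+ #
+ # warns with: "DEPRECATION: Option X is deprecated. pip 99.0 will enforce
+ # this behaviour change. A possible replacement is option Y. Discussion
+ # can be found at https://github.com/pypa/pip/issues/12345"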
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/utils/egg_link.py b/vllm/lib/python3.10/site-packages/pip/_internal/utils/egg_link.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a384a63682ce53cafcf889551b13b9177a14e44
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/utils/egg_link.py
@@ -0,0 +1,80 @@
+import os
+import re
+import sys
+from typing import List, Optional
+
+from pip._internal.locations import site_packages, user_site
+from pip._internal.utils.virtualenv import (
+ running_under_virtualenv,
+ virtualenv_no_global,
+)
+
+__all__ = [
+ "egg_link_path_from_sys_path",
+ "egg_link_path_from_location",
+]
+
+
+def _egg_link_names(raw_name: str) -> List[str]:
+ """
+ Convert a Name metadata value to a .egg-link name, by applying
+ the same substitution as pkg_resources's safe_name function.
+ Note: we cannot use canonicalize_name because its logic differs.
+
+ We also look for the raw name (without normalization) as setuptools 69 changed
+ the way it names .egg-link files (https://github.com/pypa/setuptools/issues/4167).
+ """
+ return [
+ re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link",
+ f"{raw_name}.egg-link",
+ ]
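+ # Illustrative: _egg_link_names("my.pkg_name") returns
+ # ["my.pkg-name.egg-link", "my.pkg_name.egg-link"].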
+
+
+def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]:
+ """
+ Look for a .egg-link file for project name, by walking sys.path.
+ """
+ egg_link_names = _egg_link_names(raw_name)
+ for path_item in sys.path:
+ for egg_link_name in egg_link_names:
+ egg_link = os.path.join(path_item, egg_link_name)
+ if os.path.isfile(egg_link):
+ return egg_link
+ return None
+
+
+def egg_link_path_from_location(raw_name: str) -> Optional[str]:
+ """
+ Return the path for the .egg-link file if it exists, otherwise, None.
+
+ There are 3 scenarios:
+ 1) not in a virtualenv
+ try to find in site.USER_SITE, then site_packages
+ 2) in a no-global virtualenv
+ try to find in site_packages
+ 3) in a yes-global virtualenv
+ try to find in site_packages, then site.USER_SITE
+ (don't look in global location)
+
+ For #1 and #3, there could be odd cases where there's an egg-link in 2
+ locations.
+
+ This method will just return the first one found.
+ """
+ sites: List[str] = []
+ if running_under_virtualenv():
+ sites.append(site_packages)
+ if not virtualenv_no_global() and user_site:
+ sites.append(user_site)
+ else:
+ if user_site:
+ sites.append(user_site)
+ sites.append(site_packages)
+
+ egg_link_names = _egg_link_names(raw_name)
+ for site in sites:
+ for egg_link_name in egg_link_names:
+ egglink = os.path.join(site, egg_link_name)
+ if os.path.isfile(egglink):
+ return egglink
+ return None
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/utils/glibc.py b/vllm/lib/python3.10/site-packages/pip/_internal/utils/glibc.py
new file mode 100644
index 0000000000000000000000000000000000000000..998868ff2a482648024c848c9650d584403cbc8a
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/utils/glibc.py
@@ -0,0 +1,101 @@
+import os
+import sys
+from typing import Optional, Tuple
+
+
+def glibc_version_string() -> Optional[str]:
+ "Returns glibc version string, or None if not using glibc."
+ return glibc_version_string_confstr() or glibc_version_string_ctypes()
+
+
+def glibc_version_string_confstr() -> Optional[str]:
+ "Primary implementation of glibc_version_string using os.confstr."
+ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
+ # to be broken or missing. This strategy is used in the standard library
+ # platform module:
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
+ if sys.platform == "win32":
+ return None
+ try:
+ gnu_libc_version = os.confstr("CS_GNU_LIBC_VERSION")
+ if gnu_libc_version is None:
+ return None
+ # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
+ _, version = gnu_libc_version.split()
+ except (AttributeError, OSError, ValueError):
+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
+ return None
+ return version
+
+
+def glibc_version_string_ctypes() -> Optional[str]:
+ "Fallback implementation of glibc_version_string using ctypes."
+
+ try:
+ import ctypes
+ except ImportError:
+ return None
+
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
+ # manpage says, "If filename is NULL, then the returned handle is for the
+ # main program". This way we can let the linker do the work to figure out
+ # which libc our process is actually using.
+ #
+ # We must also handle the special case where the executable is not a
+ # dynamically linked executable. This can occur when using musl libc,
+ # for example. In this situation, dlopen() will error, leading to an
+ # OSError. Interestingly, at least in the case of musl, there is no
+ # errno set on the OSError. The single string argument used to construct
+ # OSError comes from libc itself and is therefore not portable to
+ # hard code here. In any case, failure to call dlopen() means we
+ # can't proceed, so we bail on our attempt.
+ try:
+ process_namespace = ctypes.CDLL(None)
+ except OSError:
+ return None
+
+ try:
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
+ except AttributeError:
+ # Symbol doesn't exist -> therefore, we are not linked to
+ # glibc.
+ return None
+
+ # Call gnu_get_libc_version, which returns a string like "2.5"
+ gnu_get_libc_version.restype = ctypes.c_char_p
+ version_str: str = gnu_get_libc_version()
+ # py2 / py3 compatibility:
+ if not isinstance(version_str, str):
+ version_str = version_str.decode("ascii")
+
+ return version_str
+
+
+# platform.libc_ver regularly returns completely nonsensical glibc
+# versions. E.g. on my computer, platform says:
+#
+# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
+# ('glibc', '2.7')
+# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
+# ('glibc', '2.9')
+#
+# But the truth is:
+#
+# ~$ ldd --version
+# ldd (Debian GLIBC 2.22-11) 2.22
+#
+# This is unfortunate, because it means that the linehaul data on libc
+# versions that was generated by pip 8.1.2 and earlier is useless and
+# misleading. Solution: instead of using platform, use our code that actually
+# works.
+def libc_ver() -> Tuple[str, str]:
+ """Try to determine the glibc version
+
+ Returns a tuple of strings (lib, version) which default to empty strings
+ in case the lookup fails.
+ """
+ glibc_version = glibc_version_string()
+ if glibc_version is None:
+ return ("", "")
+ else:
+ return ("glibc", glibc_version)
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/utils/misc.py b/vllm/lib/python3.10/site-packages/pip/_internal/utils/misc.py
new file mode 100644
index 0000000000000000000000000000000000000000..44f6a05fbdd7f7b5779141f53b25b523af7e15eb
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/utils/misc.py
@@ -0,0 +1,773 @@
+import errno
+import getpass
+import hashlib
+import logging
+import os
+import posixpath
+import shutil
+import stat
+import sys
+import sysconfig
+import urllib.parse
+from dataclasses import dataclass
+from functools import partial
+from io import StringIO
+from itertools import filterfalse, tee, zip_longest
+from pathlib import Path
+from types import FunctionType, TracebackType
+from typing import (
+ Any,
+ BinaryIO,
+ Callable,
+ Generator,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Sequence,
+ TextIO,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ cast,
+)
+
+from pip._vendor.packaging.requirements import Requirement
+from pip._vendor.pyproject_hooks import BuildBackendHookCaller
+
+from pip import __version__
+from pip._internal.exceptions import CommandError, ExternallyManagedEnvironment
+from pip._internal.locations import get_major_minor_version
+from pip._internal.utils.compat import WINDOWS
+from pip._internal.utils.retry import retry
+from pip._internal.utils.virtualenv import running_under_virtualenv
+
+__all__ = [
+ "rmtree",
+ "display_path",
+ "backup_dir",
+ "ask",
+ "splitext",
+ "format_size",
+ "is_installable_dir",
+ "normalize_path",
+ "renames",
+ "get_prog",
+ "ensure_dir",
+ "remove_auth_from_url",
+ "check_externally_managed",
+ "ConfiguredBuildBackendHookCaller",
+]
+
+logger = logging.getLogger(__name__)
+
+T = TypeVar("T")
+ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType]
+VersionInfo = Tuple[int, int, int]
+NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
+OnExc = Callable[[FunctionType, Path, BaseException], Any]
+OnErr = Callable[[FunctionType, Path, ExcInfo], Any]
+
+FILE_CHUNK_SIZE = 1024 * 1024
+
+
+def get_pip_version() -> str:
+ pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
+ pip_pkg_dir = os.path.abspath(pip_pkg_dir)
+
+ return f"pip {__version__} from {pip_pkg_dir} (python {get_major_minor_version()})"
+
+
+def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]:
+ """
+ Convert a tuple of ints representing a Python version to one of length
+ three.
+
+ :param py_version_info: a tuple of ints representing a Python version,
+ or None to specify no version. The tuple can have any length.
+
+ :return: a tuple of length three if `py_version_info` is non-None.
+ Otherwise, return `py_version_info` unchanged (i.e. None).
+ """
+ if len(py_version_info) < 3:
+ py_version_info += (3 - len(py_version_info)) * (0,)
+ elif len(py_version_info) > 3:
+ py_version_info = py_version_info[:3]
+
+ return cast("VersionInfo", py_version_info)
+
+
+def ensure_dir(path: str) -> None:
+ """os.path.makedirs without EEXIST."""
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ # Windows can raise spurious ENOTEMPTY errors. See #6426.
+ if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
+ raise
+
+
+def get_prog() -> str:
+ try:
+ prog = os.path.basename(sys.argv[0])
+ if prog in ("__main__.py", "-c"):
+ return f"{sys.executable} -m pip"
+ else:
+ return prog
+ except (AttributeError, TypeError, IndexError):
+ pass
+ return "pip"
+
+
+# Retry every half second for up to 3 seconds
+@retry(stop_after_delay=3, wait=0.5)
+def rmtree(
+ dir: str, ignore_errors: bool = False, onexc: Optional[OnExc] = None
+) -> None:
+ if ignore_errors:
+ onexc = _onerror_ignore
+ if onexc is None:
+ onexc = _onerror_reraise
+ handler: OnErr = partial(rmtree_errorhandler, onexc=onexc)
+ if sys.version_info >= (3, 12):
+ # See https://docs.python.org/3.12/whatsnew/3.12.html#shutil.
+ shutil.rmtree(dir, onexc=handler) # type: ignore
+ else:
+ shutil.rmtree(dir, onerror=handler) # type: ignore
+
+
+def _onerror_ignore(*_args: Any) -> None:
+ pass
+
+
+def _onerror_reraise(*_args: Any) -> None:
+ raise # noqa: PLE0704 - Bare exception used to reraise existing exception
+
+
+def rmtree_errorhandler(
+ func: FunctionType,
+ path: Path,
+ exc_info: Union[ExcInfo, BaseException],
+ *,
+ onexc: OnExc = _onerror_reraise,
+) -> None:
+ """
+ `rmtree` error handler to 'force' a file remove (i.e. like `rm -f`).
+
+    * If a file is readonly then its write flag is set and the operation is
+      retried.
+
+    * `onexc` is the original callback from `rmtree(..., onexc=onexc)`
+      that is chained at the end if the "rm -f" still fails.
+ """
+ try:
+ st_mode = os.stat(path).st_mode
+ except OSError:
+        # a failed stat is equivalent to the path not existing
+ return
+
+ if not st_mode & stat.S_IWRITE:
+ # convert to read/write
+ try:
+ os.chmod(path, st_mode | stat.S_IWRITE)
+ except OSError:
+ pass
+ else:
+ # use the original function to repeat the operation
+ try:
+ func(path)
+ return
+ except OSError:
+ pass
+
+ if not isinstance(exc_info, BaseException):
+ _, exc_info, _ = exc_info
+ onexc(func, path, exc_info)
+
+
+def display_path(path: str) -> str:
+ """Gives the display value for a given path, making it relative to cwd
+ if possible."""
+ path = os.path.normcase(os.path.abspath(path))
+ if path.startswith(os.getcwd() + os.path.sep):
+ path = "." + path[len(os.getcwd()) :]
+ return path
+
+
+def backup_dir(dir: str, ext: str = ".bak") -> str:
+ """Figure out the name of a directory to back up the given dir to
+ (adding .bak, .bak2, etc)"""
+ n = 1
+ extension = ext
+ while os.path.exists(dir + extension):
+ n += 1
+ extension = ext + str(n)
+ return dir + extension
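+
+
+# Example (illustrative): if "./pkg.bak" already exists,
+# backup_dir("./pkg") returns "./pkg.bak2".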
+
+
+def ask_path_exists(message: str, options: Iterable[str]) -> str:
+ for action in os.environ.get("PIP_EXISTS_ACTION", "").split():
+ if action in options:
+ return action
+ return ask(message, options)
+
+
+def _check_no_input(message: str) -> None:
+ """Raise an error if no input is allowed."""
+ if os.environ.get("PIP_NO_INPUT"):
+ raise Exception(
+ f"No input was expected ($PIP_NO_INPUT set); question: {message}"
+ )
+
+
+def ask(message: str, options: Iterable[str]) -> str:
+ """Ask the message interactively, with the given possible responses"""
+    while True:
+ _check_no_input(message)
+ response = input(message)
+ response = response.strip().lower()
+ if response not in options:
+ print(
+ "Your response ({!r}) was not one of the expected responses: "
+ "{}".format(response, ", ".join(options))
+ )
+ else:
+ return response
+
+
+def ask_input(message: str) -> str:
+ """Ask for input interactively."""
+ _check_no_input(message)
+ return input(message)
+
+
+def ask_password(message: str) -> str:
+ """Ask for a password interactively."""
+ _check_no_input(message)
+ return getpass.getpass(message)
+
+
+def strtobool(val: str) -> int:
+ """Convert a string representation of truth to true (1) or false (0).
+
+ True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
+ are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
+ 'val' is anything else.
+ """
+ val = val.lower()
+ if val in ("y", "yes", "t", "true", "on", "1"):
+ return 1
+ elif val in ("n", "no", "f", "false", "off", "0"):
+ return 0
+ else:
+ raise ValueError(f"invalid truth value {val!r}")
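+
+
+# Examples (illustrative): strtobool("YES") == 1, strtobool("off") == 0,
+# and strtobool("maybe") raises ValueError.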
+
+
+def format_size(bytes: float) -> str:
+ if bytes > 1000 * 1000:
+ return f"{bytes / 1000.0 / 1000:.1f} MB"
+ elif bytes > 10 * 1000:
+ return f"{int(bytes / 1000)} kB"
+ elif bytes > 1000:
+ return f"{bytes / 1000.0:.1f} kB"
+ else:
+ return f"{int(bytes)} bytes"
+
+
+def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]:
+ """Return a list of formatted rows and a list of column sizes.
+
+ For example::
+
+ >>> tabulate([['foobar', 2000], [0xdeadbeef]])
+ (['foobar 2000', '3735928559'], [10, 4])
+ """
+ rows = [tuple(map(str, row)) for row in rows]
+ sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue="")]
+ table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows]
+ return table, sizes
+
+
+def is_installable_dir(path: str) -> bool:
+ """Is path is a directory containing pyproject.toml or setup.py?
+
+ If pyproject.toml exists, this is a PEP 517 project. Otherwise we look for
+ a legacy setuptools layout by identifying setup.py. We don't check for the
+ setup.cfg because using it without setup.py is only available for PEP 517
+ projects, which are already covered by the pyproject.toml check.
+ """
+ if not os.path.isdir(path):
+ return False
+ if os.path.isfile(os.path.join(path, "pyproject.toml")):
+ return True
+ if os.path.isfile(os.path.join(path, "setup.py")):
+ return True
+ return False
+
+
+def read_chunks(
+ file: BinaryIO, size: int = FILE_CHUNK_SIZE
+) -> Generator[bytes, None, None]:
+ """Yield pieces of data from a file-like object until EOF."""
+ while True:
+ chunk = file.read(size)
+ if not chunk:
+ break
+ yield chunk
+
+
+def normalize_path(path: str, resolve_symlinks: bool = True) -> str:
+ """
+ Convert a path to its canonical, case-normalized, absolute version.
+
+ """
+ path = os.path.expanduser(path)
+ if resolve_symlinks:
+ path = os.path.realpath(path)
+ else:
+ path = os.path.abspath(path)
+ return os.path.normcase(path)
+
+
+def splitext(path: str) -> Tuple[str, str]:
+ """Like os.path.splitext, but take off .tar too"""
+ base, ext = posixpath.splitext(path)
+ if base.lower().endswith(".tar"):
+ ext = base[-4:] + ext
+ base = base[:-4]
+ return base, ext
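+
+
+# Example (illustrative): splitext("pkg-1.0.tar.gz") == ("pkg-1.0", ".tar.gz"),
+# whereas os.path.splitext would return ("pkg-1.0.tar", ".gz").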
+
+
+def renames(old: str, new: str) -> None:
+ """Like os.renames(), but handles renaming across devices."""
+ # Implementation borrowed from os.renames().
+ head, tail = os.path.split(new)
+ if head and tail and not os.path.exists(head):
+ os.makedirs(head)
+
+ shutil.move(old, new)
+
+ head, tail = os.path.split(old)
+ if head and tail:
+ try:
+ os.removedirs(head)
+ except OSError:
+ pass
+
+
+def is_local(path: str) -> bool:
+ """
+ Return True if path is within sys.prefix, if we're running in a virtualenv.
+
+ If we're not in a virtualenv, all paths are considered "local."
+
+ Caution: this function assumes the head of path has been normalized
+ with normalize_path.
+ """
+ if not running_under_virtualenv():
+ return True
+ return path.startswith(normalize_path(sys.prefix))
+
+
+def write_output(msg: Any, *args: Any) -> None:
+ logger.info(msg, *args)
+
+
+class StreamWrapper(StringIO):
+ orig_stream: TextIO
+
+ @classmethod
+ def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper":
+ ret = cls()
+ ret.orig_stream = orig_stream
+ return ret
+
+ # compileall.compile_dir() needs stdout.encoding to print to stdout
+ # type ignore is because TextIOBase.encoding is writeable
+ @property
+ def encoding(self) -> str: # type: ignore
+ return self.orig_stream.encoding
+
+
+# Simulates an enum
+def enum(*sequential: Any, **named: Any) -> Type[Any]:
+ enums = dict(zip(sequential, range(len(sequential))), **named)
+ reverse = {value: key for key, value in enums.items()}
+ enums["reverse_mapping"] = reverse
+ return type("Enum", (), enums)
+
+
+def build_netloc(host: str, port: Optional[int]) -> str:
+ """
+ Build a netloc from a host-port pair
+ """
+ if port is None:
+ return host
+ if ":" in host:
+ # Only wrap host with square brackets when it is IPv6
+ host = f"[{host}]"
+ return f"{host}:{port}"
+
+
+def build_url_from_netloc(netloc: str, scheme: str = "https") -> str:
+ """
+ Build a full URL from a netloc.
+ """
+ if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc:
+ # It must be a bare IPv6 address, so wrap it with brackets.
+ netloc = f"[{netloc}]"
+ return f"{scheme}://{netloc}"
+
+
+def parse_netloc(netloc: str) -> Tuple[Optional[str], Optional[int]]:
+ """
+ Return the host-port pair from a netloc.
+ """
+ url = build_url_from_netloc(netloc)
+ parsed = urllib.parse.urlparse(url)
+ return parsed.hostname, parsed.port
+
+
+def split_auth_from_netloc(netloc: str) -> NetlocTuple:
+ """
+ Parse out and remove the auth information from a netloc.
+
+ Returns: (netloc, (username, password)).
+ """
+ if "@" not in netloc:
+ return netloc, (None, None)
+
+ # Split from the right because that's how urllib.parse.urlsplit()
+ # behaves if more than one @ is present (which can be checked using
+ # the password attribute of urlsplit()'s return value).
+ auth, netloc = netloc.rsplit("@", 1)
+ pw: Optional[str] = None
+ if ":" in auth:
+ # Split from the left because that's how urllib.parse.urlsplit()
+ # behaves if more than one : is present (which again can be checked
+ # using the password attribute of the return value)
+ user, pw = auth.split(":", 1)
+ else:
+ user, pw = auth, None
+
+ user = urllib.parse.unquote(user)
+ if pw is not None:
+ pw = urllib.parse.unquote(pw)
+
+ return netloc, (user, pw)
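+
+
+# Example (illustrative): split_auth_from_netloc("user:pass@example.com")
+# returns ("example.com", ("user", "pass")); with no auth present, the
+# netloc is returned unchanged alongside (None, None).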
+
+
+def redact_netloc(netloc: str) -> str:
+ """
+ Replace the sensitive data in a netloc with "****", if it exists.
+
+ For example:
+ - "user:pass@example.com" returns "user:****@example.com"
+ - "accesstoken@example.com" returns "****@example.com"
+ """
+ netloc, (user, password) = split_auth_from_netloc(netloc)
+ if user is None:
+ return netloc
+ if password is None:
+ user = "****"
+ password = ""
+ else:
+ user = urllib.parse.quote(user)
+ password = ":****"
+ return f"{user}{password}@{netloc}"
+
+
+def _transform_url(
+ url: str, transform_netloc: Callable[[str], Tuple[Any, ...]]
+) -> Tuple[str, NetlocTuple]:
+ """Transform and replace netloc in a url.
+
+ transform_netloc is a function taking the netloc and returning a
+ tuple. The first element of this tuple is the new netloc. The
+ entire tuple is returned.
+
+ Returns a tuple containing the transformed url as item 0 and the
+ original tuple returned by transform_netloc as item 1.
+ """
+ purl = urllib.parse.urlsplit(url)
+ netloc_tuple = transform_netloc(purl.netloc)
+ # stripped url
+ url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment)
+ surl = urllib.parse.urlunsplit(url_pieces)
+ return surl, cast("NetlocTuple", netloc_tuple)
+
+
+def _get_netloc(netloc: str) -> NetlocTuple:
+ return split_auth_from_netloc(netloc)
+
+
+def _redact_netloc(netloc: str) -> Tuple[str]:
+ return (redact_netloc(netloc),)
+
+
+def split_auth_netloc_from_url(
+ url: str,
+) -> Tuple[str, str, Tuple[Optional[str], Optional[str]]]:
+ """
+ Parse a url into separate netloc, auth, and url with no auth.
+
+ Returns: (url_without_auth, netloc, (username, password))
+ """
+ url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc)
+ return url_without_auth, netloc, auth
+
+
+def remove_auth_from_url(url: str) -> str:
+ """Return a copy of url with 'username:password@' removed."""
+ # username/pass params are passed to subversion through flags
+ # and are not recognized in the url.
+ return _transform_url(url, _get_netloc)[0]
+
+
+def redact_auth_from_url(url: str) -> str:
+ """Replace the password in a given url with ****."""
+ return _transform_url(url, _redact_netloc)[0]
+
+
+def redact_auth_from_requirement(req: Requirement) -> str:
+ """Replace the password in a given requirement url with ****."""
+ if not req.url:
+ return str(req)
+ return str(req).replace(req.url, redact_auth_from_url(req.url))
+
+
+@dataclass(frozen=True)
+class HiddenText:
+ secret: str
+ redacted: str
+
+ def __repr__(self) -> str:
+ return f""
+
+ def __str__(self) -> str:
+ return self.redacted
+
+ # This is useful for testing.
+ def __eq__(self, other: Any) -> bool:
+ if type(self) is not type(other):
+ return False
+
+ # The string being used for redaction doesn't also have to match,
+ # just the raw, original string.
+ return self.secret == other.secret
+
+
+def hide_value(value: str) -> HiddenText:
+ return HiddenText(value, redacted="****")
+
+
+def hide_url(url: str) -> HiddenText:
+ redacted = redact_auth_from_url(url)
+ return HiddenText(url, redacted=redacted)
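+
+
+# Example (illustrative): str(hide_url("https://user:pass@example.com/simple"))
+# == "https://user:****@example.com/simple", while .secret keeps the original.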
+
+
+def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None:
+ """Protection of pip.exe from modification on Windows
+
+ On Windows, any operation modifying pip should be run as:
+ python -m pip ...
+ """
+ pip_names = [
+ "pip",
+ f"pip{sys.version_info.major}",
+ f"pip{sys.version_info.major}.{sys.version_info.minor}",
+ ]
+
+ # See https://github.com/pypa/pip/issues/1299 for more discussion
+ should_show_use_python_msg = (
+ modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names
+ )
+
+ if should_show_use_python_msg:
+ new_command = [sys.executable, "-m", "pip"] + sys.argv[1:]
+ raise CommandError(
+ "To modify pip, please run the following command:\n{}".format(
+ " ".join(new_command)
+ )
+ )
+
+
+def check_externally_managed() -> None:
+ """Check whether the current environment is externally managed.
+
+ If the ``EXTERNALLY-MANAGED`` config file is found, the current environment
+ is considered externally managed, and an ExternallyManagedEnvironment is
+ raised.
+ """
+ if running_under_virtualenv():
+ return
+ marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED")
+ if not os.path.isfile(marker):
+ return
+ raise ExternallyManagedEnvironment.from_config(marker)
+
+
+def is_console_interactive() -> bool:
+ """Is this console interactive?"""
+ return sys.stdin is not None and sys.stdin.isatty()
+
+
+def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]:
+ """Return (hash, length) for path using hashlib.sha256()"""
+
+ h = hashlib.sha256()
+ length = 0
+ with open(path, "rb") as f:
+ for block in read_chunks(f, size=blocksize):
+ length += len(block)
+ h.update(block)
+ return h, length
+
+
+def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]:
+ """
+ Return paired elements.
+
+ For example:
+ s -> (s0, s1), (s2, s3), (s4, s5), ...
+ """
+ iterable = iter(iterable)
+ return zip_longest(iterable, iterable)
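+
+
+# Example (illustrative): list(pairwise([1, 2, 3])) == [(1, 2), (3, None)];
+# an odd-length input pads the final pair with None via zip_longest.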
+
+
+def partition(
+ pred: Callable[[T], bool], iterable: Iterable[T]
+) -> Tuple[Iterable[T], Iterable[T]]:
+ """
+ Use a predicate to partition entries into false entries and true entries,
+ like
+
+ partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9
+ """
+ t1, t2 = tee(iterable)
+ return filterfalse(pred, t1), filter(pred, t2)
+
+
+class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
+ def __init__(
+ self,
+ config_holder: Any,
+ source_dir: str,
+ build_backend: str,
+ backend_path: Optional[str] = None,
+ runner: Optional[Callable[..., None]] = None,
+ python_executable: Optional[str] = None,
+ ):
+ super().__init__(
+ source_dir, build_backend, backend_path, runner, python_executable
+ )
+ self.config_holder = config_holder
+
+ def build_wheel(
+ self,
+ wheel_directory: str,
+ config_settings: Optional[Mapping[str, Any]] = None,
+ metadata_directory: Optional[str] = None,
+ ) -> str:
+ cs = self.config_holder.config_settings
+ return super().build_wheel(
+ wheel_directory, config_settings=cs, metadata_directory=metadata_directory
+ )
+
+ def build_sdist(
+ self,
+ sdist_directory: str,
+ config_settings: Optional[Mapping[str, Any]] = None,
+ ) -> str:
+ cs = self.config_holder.config_settings
+ return super().build_sdist(sdist_directory, config_settings=cs)
+
+ def build_editable(
+ self,
+ wheel_directory: str,
+ config_settings: Optional[Mapping[str, Any]] = None,
+ metadata_directory: Optional[str] = None,
+ ) -> str:
+ cs = self.config_holder.config_settings
+ return super().build_editable(
+ wheel_directory, config_settings=cs, metadata_directory=metadata_directory
+ )
+
+ def get_requires_for_build_wheel(
+ self, config_settings: Optional[Mapping[str, Any]] = None
+ ) -> Sequence[str]:
+ cs = self.config_holder.config_settings
+ return super().get_requires_for_build_wheel(config_settings=cs)
+
+ def get_requires_for_build_sdist(
+ self, config_settings: Optional[Mapping[str, Any]] = None
+ ) -> Sequence[str]:
+ cs = self.config_holder.config_settings
+ return super().get_requires_for_build_sdist(config_settings=cs)
+
+ def get_requires_for_build_editable(
+ self, config_settings: Optional[Mapping[str, Any]] = None
+ ) -> Sequence[str]:
+ cs = self.config_holder.config_settings
+ return super().get_requires_for_build_editable(config_settings=cs)
+
+ def prepare_metadata_for_build_wheel(
+ self,
+ metadata_directory: str,
+ config_settings: Optional[Mapping[str, Any]] = None,
+ _allow_fallback: bool = True,
+ ) -> str:
+ cs = self.config_holder.config_settings
+ return super().prepare_metadata_for_build_wheel(
+ metadata_directory=metadata_directory,
+ config_settings=cs,
+ _allow_fallback=_allow_fallback,
+ )
+
+ def prepare_metadata_for_build_editable(
+ self,
+ metadata_directory: str,
+ config_settings: Optional[Mapping[str, Any]] = None,
+ _allow_fallback: bool = True,
+ ) -> Optional[str]:
+ cs = self.config_holder.config_settings
+ return super().prepare_metadata_for_build_editable(
+ metadata_directory=metadata_directory,
+ config_settings=cs,
+ _allow_fallback=_allow_fallback,
+ )
+
+
+def warn_if_run_as_root() -> None:
+ """Output a warning for sudo users on Unix.
+
+ In a virtual environment, sudo pip still writes to virtualenv.
+ On Windows, users may run pip as Administrator without issues.
+ This warning only applies to Unix root users outside of virtualenv.
+ """
+ if running_under_virtualenv():
+ return
+ if not hasattr(os, "getuid"):
+ return
+ # On Windows, there are no "system managed" Python packages. Installing as
+ # Administrator via pip is the correct way of updating system environments.
+ #
+ # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
+ # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
+ if sys.platform == "win32" or sys.platform == "cygwin":
+ return
+
+ if os.getuid() != 0:
+ return
+
+ logger.warning(
+ "Running pip as the 'root' user can result in broken permissions and "
+ "conflicting behaviour with the system package manager, possibly "
+ "rendering your system unusable. "
+ "It is recommended to use a virtual environment instead: "
+ "https://pip.pypa.io/warnings/venv. "
+ "Use the --root-user-action option if you know what you are doing and "
+ "want to suppress this warning."
+ )
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/utils/retry.py b/vllm/lib/python3.10/site-packages/pip/_internal/utils/retry.py
new file mode 100644
index 0000000000000000000000000000000000000000..abfe07286ea747f656ea73f5a6919f1d66215847
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/utils/retry.py
@@ -0,0 +1,42 @@
+import functools
+from time import perf_counter, sleep
+from typing import Callable, TypeVar
+
+from pip._vendor.typing_extensions import ParamSpec
+
+T = TypeVar("T")
+P = ParamSpec("P")
+
+
+def retry(
+ wait: float, stop_after_delay: float
+) -> Callable[[Callable[P, T]], Callable[P, T]]:
+ """Decorator to automatically retry a function on error.
+
+    If the function raises an exception, it is called again with the same
+    arguments until it returns or the time limit is reached. When the time
+    limit is surpassed, the last exception raised is reraised.
+
+ :param wait: The time to wait after an error before retrying, in seconds.
+ :param stop_after_delay: The time limit after which retries will cease,
+ in seconds.
+ """
+
+ def wrapper(func: Callable[P, T]) -> Callable[P, T]:
+
+ @functools.wraps(func)
+ def retry_wrapped(*args: P.args, **kwargs: P.kwargs) -> T:
+ # The performance counter is monotonic on all platforms we care
+ # about and has much better resolution than time.monotonic().
+ start_time = perf_counter()
+ while True:
+ try:
+ return func(*args, **kwargs)
+ except Exception:
+ if perf_counter() - start_time > stop_after_delay:
+ raise
+ sleep(wait)
+
+ return retry_wrapped
+
+ return wrapper
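+
+
+# Usage sketch (illustrative): retry a flaky filesystem operation every
+# 0.5 seconds for at most 3 seconds, re-raising the last error afterwards.
+#
+#     @retry(wait=0.5, stop_after_delay=3)
+#     def remove_file(path: str) -> None:
+#         os.remove(path)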
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/utils/temp_dir.py b/vllm/lib/python3.10/site-packages/pip/_internal/utils/temp_dir.py
new file mode 100644
index 0000000000000000000000000000000000000000..06668e8ab2dad131106cd9e4963d871cea147997
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/utils/temp_dir.py
@@ -0,0 +1,296 @@
+import errno
+import itertools
+import logging
+import os.path
+import tempfile
+import traceback
+from contextlib import ExitStack, contextmanager
+from pathlib import Path
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Generator,
+ List,
+ Optional,
+ TypeVar,
+ Union,
+)
+
+from pip._internal.utils.misc import enum, rmtree
+
+logger = logging.getLogger(__name__)
+
+_T = TypeVar("_T", bound="TempDirectory")
+
+
+# Kinds of temporary directories. Only needed for ones that are
+# globally-managed.
+tempdir_kinds = enum(
+ BUILD_ENV="build-env",
+ EPHEM_WHEEL_CACHE="ephem-wheel-cache",
+ REQ_BUILD="req-build",
+)
+
+
+_tempdir_manager: Optional[ExitStack] = None
+
+
+@contextmanager
+def global_tempdir_manager() -> Generator[None, None, None]:
+ global _tempdir_manager
+ with ExitStack() as stack:
+ old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack
+ try:
+ yield
+ finally:
+ _tempdir_manager = old_tempdir_manager
+
+
+class TempDirectoryTypeRegistry:
+ """Manages temp directory behavior"""
+
+ def __init__(self) -> None:
+ self._should_delete: Dict[str, bool] = {}
+
+ def set_delete(self, kind: str, value: bool) -> None:
+ """Indicate whether a TempDirectory of the given kind should be
+ auto-deleted.
+ """
+ self._should_delete[kind] = value
+
+ def get_delete(self, kind: str) -> bool:
+ """Get configured auto-delete flag for a given TempDirectory type,
+ default True.
+ """
+ return self._should_delete.get(kind, True)
+
+
+_tempdir_registry: Optional[TempDirectoryTypeRegistry] = None
+
+
+@contextmanager
+def tempdir_registry() -> Generator[TempDirectoryTypeRegistry, None, None]:
+ """Provides a scoped global tempdir registry that can be used to dictate
+ whether directories should be deleted.
+ """
+ global _tempdir_registry
+ old_tempdir_registry = _tempdir_registry
+ _tempdir_registry = TempDirectoryTypeRegistry()
+ try:
+ yield _tempdir_registry
+ finally:
+ _tempdir_registry = old_tempdir_registry
+
+
+class _Default:
+ pass
+
+
+_default = _Default()
+
+
+class TempDirectory:
+ """Helper class that owns and cleans up a temporary directory.
+
+ This class can be used as a context manager or as an OO representation of a
+ temporary directory.
+
+ Attributes:
+ path
+ Location to the created temporary directory
+ delete
+ Whether the directory should be deleted when exiting
+ (when used as a contextmanager)
+
+ Methods:
+ cleanup()
+ Deletes the temporary directory
+
+ When used as a context manager, if the delete attribute is True, on
+ exiting the context the temporary directory is deleted.
+ """
+
+ def __init__(
+ self,
+ path: Optional[str] = None,
+ delete: Union[bool, None, _Default] = _default,
+ kind: str = "temp",
+ globally_managed: bool = False,
+ ignore_cleanup_errors: bool = True,
+ ):
+ super().__init__()
+
+ if delete is _default:
+ if path is not None:
+ # If we were given an explicit directory, resolve delete option
+ # now.
+ delete = False
+ else:
+ # Otherwise, we wait until cleanup and see what
+ # tempdir_registry says.
+ delete = None
+
+        # The only time we specify path is for editables, where it
+        # is the value of the --src option.
+ if path is None:
+ path = self._create(kind)
+
+ self._path = path
+ self._deleted = False
+ self.delete = delete
+ self.kind = kind
+ self.ignore_cleanup_errors = ignore_cleanup_errors
+
+ if globally_managed:
+ assert _tempdir_manager is not None
+ _tempdir_manager.enter_context(self)
+
+ @property
+ def path(self) -> str:
+ assert not self._deleted, f"Attempted to access deleted path: {self._path}"
+ return self._path
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__} {self.path!r}>"
+
+ def __enter__(self: _T) -> _T:
+ return self
+
+ def __exit__(self, exc: Any, value: Any, tb: Any) -> None:
+ if self.delete is not None:
+ delete = self.delete
+ elif _tempdir_registry:
+ delete = _tempdir_registry.get_delete(self.kind)
+ else:
+ delete = True
+
+ if delete:
+ self.cleanup()
+
+ def _create(self, kind: str) -> str:
+ """Create a temporary directory and store its path in self.path"""
+ # We realpath here because some systems have their default tmpdir
+ # symlinked to another directory. This tends to confuse build
+ # scripts, so we canonicalize the path by traversing potential
+ # symlinks here.
+ path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
+ logger.debug("Created temporary directory: %s", path)
+ return path
+
+ def cleanup(self) -> None:
+ """Remove the temporary directory created and reset state"""
+ self._deleted = True
+ if not os.path.exists(self._path):
+ return
+
+ errors: List[BaseException] = []
+
+ def onerror(
+ func: Callable[..., Any],
+ path: Path,
+ exc_val: BaseException,
+ ) -> None:
+ """Log a warning for a `rmtree` error and continue"""
+ formatted_exc = "\n".join(
+ traceback.format_exception_only(type(exc_val), exc_val)
+ )
+ formatted_exc = formatted_exc.rstrip() # remove trailing new line
+ if func in (os.unlink, os.remove, os.rmdir):
+ logger.debug(
+ "Failed to remove a temporary file '%s' due to %s.\n",
+ path,
+ formatted_exc,
+ )
+ else:
+ logger.debug("%s failed with %s.", func.__qualname__, formatted_exc)
+ errors.append(exc_val)
+
+ if self.ignore_cleanup_errors:
+ try:
+ # first try with @retry; retrying to handle ephemeral errors
+ rmtree(self._path, ignore_errors=False)
+ except OSError:
+ # last pass ignore/log all errors
+ rmtree(self._path, onexc=onerror)
+ if errors:
+ logger.warning(
+ "Failed to remove contents in a temporary directory '%s'.\n"
+ "You can safely remove it manually.",
+ self._path,
+ )
+ else:
+ rmtree(self._path)
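+
+
+# Usage sketch (illustrative): as a context manager, the directory is
+# removed on exit when delete resolves to True.
+#
+#     with TempDirectory(kind="example") as tmp_dir:
+#         do_something_with(tmp_dir.path)  # hypothetical helper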
+
+
+class AdjacentTempDirectory(TempDirectory):
+ """Helper class that creates a temporary directory adjacent to a real one.
+
+ Attributes:
+ original
+ The original directory to create a temp directory for.
+ path
+ After calling create() or entering, contains the full
+ path to the temporary directory.
+ delete
+ Whether the directory should be deleted when exiting
+ (when used as a contextmanager)
+
+ """
+
+ # The characters that may be used to name the temp directory
+ # We always prepend a ~ and then rotate through these until
+ # a usable name is found.
+ # pkg_resources raises a different error for .dist-info folder
+ # with leading '-' and invalid metadata
+ LEADING_CHARS = "-~.=%0123456789"
+
+ def __init__(self, original: str, delete: Optional[bool] = None) -> None:
+ self.original = original.rstrip("/\\")
+ super().__init__(delete=delete)
+
+ @classmethod
+ def _generate_names(cls, name: str) -> Generator[str, None, None]:
+ """Generates a series of temporary names.
+
+ The algorithm replaces the leading characters in the name
+ with ones that are valid filesystem characters, but are not
+ valid package names (for both Python and pip definitions of
+ package).
+ """
+ for i in range(1, len(name)):
+ for candidate in itertools.combinations_with_replacement(
+ cls.LEADING_CHARS, i - 1
+ ):
+ new_name = "~" + "".join(candidate) + name[i:]
+ if new_name != name:
+ yield new_name
+
+ # If we make it this far, we will have to make a longer name
+ for i in range(len(cls.LEADING_CHARS)):
+ for candidate in itertools.combinations_with_replacement(
+ cls.LEADING_CHARS, i
+ ):
+ new_name = "~" + "".join(candidate) + name
+ if new_name != name:
+ yield new_name
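+
+    # Example (illustrative): for name "foo" the first candidates are
+    # "~oo", then "~-o", "~~o", "~.o", ..., growing longer only if needed.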
+
+ def _create(self, kind: str) -> str:
+ root, name = os.path.split(self.original)
+ for candidate in self._generate_names(name):
+ path = os.path.join(root, candidate)
+ try:
+ os.mkdir(path)
+ except OSError as ex:
+ # Continue if the name exists already
+ if ex.errno != errno.EEXIST:
+ raise
+ else:
+ path = os.path.realpath(path)
+ break
+ else:
+ # Final fallback on the default behavior.
+ path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
+
+ logger.debug("Created temporary directory: %s", path)
+ return path
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/utils/urls.py b/vllm/lib/python3.10/site-packages/pip/_internal/utils/urls.py
new file mode 100644
index 0000000000000000000000000000000000000000..9f34f882a1a6b7bf8e8ec5eb42c5d28f2c4e30aa
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/utils/urls.py
@@ -0,0 +1,55 @@
+import os
+import string
+import urllib.parse
+import urllib.request
+
+from .compat import WINDOWS
+
+
+def path_to_url(path: str) -> str:
+ """
+ Convert a path to a file: URL. The path will be made absolute and have
+ quoted path parts.
+ """
+ path = os.path.normpath(os.path.abspath(path))
+ url = urllib.parse.urljoin("file:", urllib.request.pathname2url(path))
+ return url
+
+
+def url_to_path(url: str) -> str:
+ """
+ Convert a file: URL to a path.
+ """
+ assert url.startswith(
+ "file:"
+ ), f"You can only turn file: urls into filenames (not {url!r})"
+
+ _, netloc, path, _, _ = urllib.parse.urlsplit(url)
+
+ if not netloc or netloc == "localhost":
+ # According to RFC 8089, same as empty authority.
+ netloc = ""
+ elif WINDOWS:
+ # If we have a UNC path, prepend UNC share notation.
+ netloc = "\\\\" + netloc
+ else:
+ raise ValueError(
+ f"non-local file URIs are not supported on this platform: {url!r}"
+ )
+
+ path = urllib.request.url2pathname(netloc + path)
+
+ # On Windows, urlsplit parses the path as something like "/C:/Users/foo".
+ # This creates issues for path-related functions like io.open(), so we try
+ # to detect and strip the leading slash.
+ if (
+ WINDOWS
+ and not netloc # Not UNC.
+ and len(path) >= 3
+ and path[0] == "/" # Leading slash to strip.
+ and path[1] in string.ascii_letters # Drive letter.
+ and path[2:4] in (":", ":/") # Colon + end of string, or colon + absolute path.
+ ):
+ path = path[1:]
+
+ return path
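+
+
+# Round-trip example (illustrative, POSIX): path_to_url("/tmp/a b") ==
+# "file:///tmp/a%20b" and url_to_path("file:///tmp/a%20b") == "/tmp/a b".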
diff --git a/vllm/lib/python3.10/site-packages/pip/_internal/utils/wheel.py b/vllm/lib/python3.10/site-packages/pip/_internal/utils/wheel.py
new file mode 100644
index 0000000000000000000000000000000000000000..f85aee8a3f925ad831431de5251c4e9daa6877ea
--- /dev/null
+++ b/vllm/lib/python3.10/site-packages/pip/_internal/utils/wheel.py
@@ -0,0 +1,134 @@
+"""Support functions for working with wheel files.
+"""
+
+import logging
+from email.message import Message
+from email.parser import Parser
+from typing import Tuple
+from zipfile import BadZipFile, ZipFile
+
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.exceptions import UnsupportedWheel
+
+VERSION_COMPATIBLE = (1, 0)
+
+
+logger = logging.getLogger(__name__)
+
+
+def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]:
+ """Extract information from the provided wheel, ensuring it meets basic
+ standards.
+
+ Returns the name of the .dist-info directory and the parsed WHEEL metadata.
+ """
+ try:
+ info_dir = wheel_dist_info_dir(wheel_zip, name)
+ metadata = wheel_metadata(wheel_zip, info_dir)
+ version = wheel_version(metadata)
+ except UnsupportedWheel as e:
+ raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
+
+ check_compatibility(version, name)
+
+ return info_dir, metadata
+
+
+def wheel_dist_info_dir(source: ZipFile, name: str) -> str:
+ """Returns the name of the contained .dist-info directory.
+
+    Raises UnsupportedWheel if the directory is not found, more than one is
+    found, or it doesn't match the provided name.
+ """
+ # Zip file path separators must be /
+ subdirs = {p.split("/", 1)[0] for p in source.namelist()}
+
+ info_dirs = [s for s in subdirs if s.endswith(".dist-info")]
+
+ if not info_dirs:
+ raise UnsupportedWheel(".dist-info directory not found")
+
+ if len(info_dirs) > 1:
+ raise UnsupportedWheel(
+ "multiple .dist-info directories found: {}".format(", ".join(info_dirs))
+ )
+
+ info_dir = info_dirs[0]
+
+ info_dir_name = canonicalize_name(info_dir)
+ canonical_name = canonicalize_name(name)
+ if not info_dir_name.startswith(canonical_name):
+ raise UnsupportedWheel(
+ f".dist-info directory {info_dir!r} does not start with {canonical_name!r}"
+ )
+
+ return info_dir
+
+
+def read_wheel_metadata_file(source: ZipFile, path: str) -> bytes:
+ try:
+ return source.read(path)
+ # BadZipFile for general corruption, KeyError for missing entry,
+ # and RuntimeError for password-protected files
+ except (BadZipFile, KeyError, RuntimeError) as e:
+ raise UnsupportedWheel(f"could not read {path!r} file: {e!r}")
+
+
+def wheel_metadata(source: ZipFile, dist_info_dir: str) -> Message:
+ """Return the WHEEL metadata of an extracted wheel, if possible.
+ Otherwise, raise UnsupportedWheel.
+ """
+ path = f"{dist_info_dir}/WHEEL"
+ # Zip file path separators must be /
+ wheel_contents = read_wheel_metadata_file(source, path)
+
+ try:
+ wheel_text = wheel_contents.decode()
+ except UnicodeDecodeError as e:
+ raise UnsupportedWheel(f"error decoding {path!r}: {e!r}")
+
+ # FeedParser (used by Parser) does not raise any exceptions. The returned
+ # message may have .defects populated, but for backwards-compatibility we
+ # currently ignore them.
+ return Parser().parsestr(wheel_text)
+
+
+def wheel_version(wheel_data: Message) -> Tuple[int, ...]:
+ """Given WHEEL metadata, return the parsed Wheel-Version.
+ Otherwise, raise UnsupportedWheel.
+ """
+ version_text = wheel_data["Wheel-Version"]
+ if version_text is None:
+ raise UnsupportedWheel("WHEEL is missing Wheel-Version")
+
+ version = version_text.strip()
+
+ try:
+ return tuple(map(int, version.split(".")))
+ except ValueError:
+ raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}")
+
+
+def check_compatibility(version: Tuple[int, ...], name: str) -> None:
+ """Raises errors or warns if called with an incompatible Wheel-Version.
+
+    pip should refuse to install a Wheel-Version that's a major series
+    ahead of what it's compatible with (e.g. 2.0 > 1.1), and warn when
+    installing a version only a minor version ahead (e.g. 1.2 > 1.1).
+
+ version: a 2-tuple representing a Wheel-Version (Major, Minor)
+ name: name of wheel or package to raise exception about
+
+ :raises UnsupportedWheel: when an incompatible Wheel-Version is given
+ """
+ if version[0] > VERSION_COMPATIBLE[0]:
+ raise UnsupportedWheel(
+ "{}'s Wheel-Version ({}) is not compatible with this version "
+ "of pip".format(name, ".".join(map(str, version)))
+ )
+ elif version > VERSION_COMPATIBLE:
+ logger.warning(
+ "Installing from a newer Wheel-Version (%s)",
+ ".".join(map(str, version)),
+ )
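+
+
+# Examples (illustrative): check_compatibility((1, 0), "demo") is a no-op,
+# check_compatibility((1, 2), "demo") only logs a warning, and
+# check_compatibility((2, 0), "demo") raises UnsupportedWheel.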