ZTWHHH committed on
Commit
7bedf6a
·
verified ·
1 Parent(s): 404affd

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50) hide show
  1. vllm/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py +21 -0
  2. vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc +0 -0
  3. vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc +0 -0
  4. vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc +0 -0
  5. vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc +0 -0
  6. vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc +0 -0
  7. vllm/lib/python3.10/site-packages/pip/_internal/distributions/base.py +53 -0
  8. vllm/lib/python3.10/site-packages/pip/_internal/distributions/installed.py +29 -0
  9. vllm/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py +158 -0
  10. vllm/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py +42 -0
  11. vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc +0 -0
  12. vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc +0 -0
  13. vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc +0 -0
  14. vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc +0 -0
  15. vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc +0 -0
  16. vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc +0 -0
  17. vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc +0 -0
  18. vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc +0 -0
  19. vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc +0 -0
  20. vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc +0 -0
  21. vllm/lib/python3.10/site-packages/pip/_internal/models/direct_url.py +224 -0
  22. vllm/lib/python3.10/site-packages/pip/_internal/models/link.py +604 -0
  23. vllm/lib/python3.10/site-packages/pip/_internal/models/target_python.py +121 -0
  24. vllm/lib/python3.10/site-packages/pip/_internal/operations/__init__.py +0 -0
  25. vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc +0 -0
  26. vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-310.pyc +0 -0
  27. vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc +0 -0
  28. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__init__.py +0 -0
  29. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-310.pyc +0 -0
  30. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-310.pyc +0 -0
  31. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-310.pyc +0 -0
  32. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-310.pyc +0 -0
  33. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc +0 -0
  34. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc +0 -0
  35. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc +0 -0
  36. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-310.pyc +0 -0
  37. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/build_tracker.py +138 -0
  38. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py +39 -0
  39. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py +42 -0
  40. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py +74 -0
  41. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel.py +37 -0
  42. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_editable.py +46 -0
  43. vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py +102 -0
  44. vllm/lib/python3.10/site-packages/pip/_internal/operations/check.py +181 -0
  45. vllm/lib/python3.10/site-packages/pip/_internal/operations/freeze.py +256 -0
  46. vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py +2 -0
  47. vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc +0 -0
  48. vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc +0 -0
  49. vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-310.pyc +0 -0
  50. vllm/lib/python3.10/site-packages/pip/_internal/operations/install/editable_legacy.py +47 -0
vllm/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pip._internal.distributions.base import AbstractDistribution
2
+ from pip._internal.distributions.sdist import SourceDistribution
3
+ from pip._internal.distributions.wheel import WheelDistribution
4
+ from pip._internal.req.req_install import InstallRequirement
5
+
6
+
7
def make_distribution_for_install_requirement(
    install_req: InstallRequirement,
) -> AbstractDistribution:
    """Returns a Distribution for the given InstallRequirement.

    Editable requirements always take the source-distribution path (they
    still use the legacy logic until a modern standard exists for them);
    only a non-editable wheel gets a WheelDistribution.
    """
    if install_req.is_wheel and not install_req.editable:
        return WheelDistribution(install_req)
    # Editable requirements and everything that is not a wheel are handled
    # as source distributions.
    return SourceDistribution(install_req)
vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (777 Bytes). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc ADDED
Binary file (2.49 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc ADDED
Binary file (1.46 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc ADDED
Binary file (5.29 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc ADDED
Binary file (1.86 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/distributions/base.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import abc
2
+ from typing import TYPE_CHECKING, Optional
3
+
4
+ from pip._internal.metadata.base import BaseDistribution
5
+ from pip._internal.req import InstallRequirement
6
+
7
+ if TYPE_CHECKING:
8
+ from pip._internal.index.package_finder import PackageFinder
9
+
10
+
11
class AbstractDistribution(metaclass=abc.ABCMeta):
    """A base class for handling installable artifacts.

    The requirements for anything installable are as follows:

    - we must be able to determine the requirement name
      (or we can't correctly handle the non-upgrade case).

    - for packages with setup requirements, we must also be able
      to determine their requirements without installing additional
      packages (for the same reason as run-time dependencies)

    - we must be able to create a Distribution object exposing the
      above metadata.

    - if we need to do work in the build tracker, we must be able to generate a unique
      string to identify the requirement in the build tracker.
    """

    def __init__(self, req: "InstallRequirement") -> None:
        super().__init__()
        # The requirement this distribution wraps.
        self.req = req

    # NOTE: ``abc.abstractproperty`` is deprecated since Python 3.3; the
    # supported spelling is stacking @property over @abc.abstractmethod.
    @property
    @abc.abstractmethod
    def build_tracker_id(self) -> Optional[str]:
        """A string that uniquely identifies this requirement to the build tracker.

        If None, then this dist has no work to do in the build tracker, and
        ``.prepare_distribution_metadata()`` will not be called."""
        raise NotImplementedError()

    @abc.abstractmethod
    def get_metadata_distribution(self) -> "BaseDistribution":
        """Return a Distribution object exposing this artifact's metadata."""
        raise NotImplementedError()

    @abc.abstractmethod
    def prepare_distribution_metadata(
        self,
        finder: "PackageFinder",
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        """Do whatever work is needed before metadata can be read (subclass hook)."""
        raise NotImplementedError()
vllm/lib/python3.10/site-packages/pip/_internal/distributions/installed.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional
2
+
3
+ from pip._internal.distributions.base import AbstractDistribution
4
+ from pip._internal.index.package_finder import PackageFinder
5
+ from pip._internal.metadata import BaseDistribution
6
+
7
+
8
class InstalledDistribution(AbstractDistribution):
    """Represents an already-installed package.

    No preparation is needed because the required information has already
    been computed at install time.
    """

    @property
    def build_tracker_id(self) -> Optional[str]:
        # Nothing is ever built for an installed package, so there is no
        # work to register with the build tracker.
        return None

    def get_metadata_distribution(self) -> BaseDistribution:
        dist = self.req.satisfied_by
        assert dist is not None, "not actually installed"
        return dist

    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        # Metadata is already available; nothing to prepare.
        pass
vllm/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from typing import TYPE_CHECKING, Iterable, Optional, Set, Tuple
3
+
4
+ from pip._internal.build_env import BuildEnvironment
5
+ from pip._internal.distributions.base import AbstractDistribution
6
+ from pip._internal.exceptions import InstallationError
7
+ from pip._internal.metadata import BaseDistribution
8
+ from pip._internal.utils.subprocess import runner_with_spinner_message
9
+
10
+ if TYPE_CHECKING:
11
+ from pip._internal.index.package_finder import PackageFinder
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
class SourceDistribution(AbstractDistribution):
    """Represents a source distribution.

    The preparation step for these needs metadata for the packages to be
    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
    """

    @property
    def build_tracker_id(self) -> Optional[str]:
        """Identify this requirement uniquely by its link."""
        assert self.req.link
        return self.req.link.url_without_fragment

    def get_metadata_distribution(self) -> BaseDistribution:
        return self.req.get_dist()

    def prepare_distribution_metadata(
        self,
        finder: "PackageFinder",
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        """Generate this sdist's metadata, isolating the build if requested."""
        # Loading pyproject.toml determines whether PEP 517 is to be used.
        self.req.load_pyproject_toml()

        if self.req.use_pep517 and build_isolation:
            # Set up an isolated environment holding the build backend's
            # statically declared requirements.
            self._prepare_build_backend(finder)
            # The editable sanity check (PEP 660 support, or presence of a
            # setup.py / setup.cfg) cannot run earlier, because the backend
            # must exist before we can ask whether it supports build_editable;
            # nor later, because we want to avoid installing build
            # requirements needlessly.  Doing it here also works around
            # setuptools generating UNKNOWN.egg-info when
            # get_requires_for_build_wheel runs on a directory without
            # setup.py nor setup.cfg.
            self.req.isolated_editable_sanity_check()
            # Second pass: install whatever dynamic requirements the backend
            # reports.
            self._install_build_reqs(finder)

        # Optionally verify that the *current* environment satisfies the
        # declared build dependencies.
        if self.req.use_pep517 and check_build_deps:
            declared_reqs = self.req.pyproject_requires
            assert declared_reqs is not None
            conflicting, missing = self.req.build_env.check_requirements(
                declared_reqs
            )
            if conflicting:
                self._raise_conflicts("the backend dependencies", conflicting)
            if missing:
                self._raise_missing_reqs(missing)

        self.req.prepare_metadata()

    def _prepare_build_backend(self, finder: "PackageFinder") -> None:
        """Create the isolated BuildEnvironment and install the static
        build-time requirements into it."""
        declared_reqs = self.req.pyproject_requires
        assert declared_reqs is not None

        self.req.build_env = BuildEnvironment()
        self.req.build_env.install_requirements(
            finder, declared_reqs, "overlay", kind="build dependencies"
        )
        conflicting, missing = self.req.build_env.check_requirements(
            self.req.requirements_to_check
        )
        if conflicting:
            self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
        if missing:
            logger.warning(
                "Missing build requirements in pyproject.toml for %s.",
                self.req,
            )
            logger.warning(
                "The project does not specify a build backend, and "
                "pip cannot fall back to setuptools without %s.",
                " and ".join(map(repr, sorted(missing))),
            )

    def _get_build_requires_wheel(self) -> Iterable[str]:
        """Ask the backend for extra requirements to build a wheel."""
        with self.req.build_env:
            spinner_runner = runner_with_spinner_message(
                "Getting requirements to build wheel"
            )
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(spinner_runner):
                return backend.get_requires_for_build_wheel()

    def _get_build_requires_editable(self) -> Iterable[str]:
        """Ask the backend for extra requirements for an editable build."""
        with self.req.build_env:
            spinner_runner = runner_with_spinner_message(
                "Getting requirements to build editable"
            )
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(spinner_runner):
                return backend.get_requires_for_build_editable()

    def _install_build_reqs(self, finder: "PackageFinder") -> None:
        """Install any extra build dependencies that the backend requests.

        This must be done in a second pass, as the pyproject.toml
        dependencies must be installed before we can call the backend.
        """
        wants_editable_hook = (
            self.req.editable
            and self.req.permit_editable_wheels
            and self.req.supports_pyproject_editable
        )
        if wants_editable_hook:
            backend_reqs = self._get_build_requires_editable()
        else:
            backend_reqs = self._get_build_requires_wheel()
        conflicting, missing = self.req.build_env.check_requirements(backend_reqs)
        if conflicting:
            self._raise_conflicts("the backend dependencies", conflicting)
        self.req.build_env.install_requirements(
            finder, missing, "normal", kind="backend dependencies"
        )

    def _raise_conflicts(
        self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
    ) -> None:
        """Raise InstallationError describing conflicting build dependencies."""
        description = ", ".join(
            f"{installed} is incompatible with {wanted}"
            for installed, wanted in sorted(conflicting_reqs)
        )
        raise InstallationError(
            f"Some build dependencies for {self.req} "
            f"conflict with {conflicting_with}: {description}."
        )

    def _raise_missing_reqs(self, missing: Set[str]) -> None:
        """Raise InstallationError listing missing build dependencies."""
        names = ", ".join(map(repr, sorted(missing)))
        raise InstallationError(
            f"Some build dependencies for {self.req} are missing: {names}."
        )
vllm/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import TYPE_CHECKING, Optional
2
+
3
+ from pip._vendor.packaging.utils import canonicalize_name
4
+
5
+ from pip._internal.distributions.base import AbstractDistribution
6
+ from pip._internal.metadata import (
7
+ BaseDistribution,
8
+ FilesystemWheel,
9
+ get_wheel_distribution,
10
+ )
11
+
12
+ if TYPE_CHECKING:
13
+ from pip._internal.index.package_finder import PackageFinder
14
+
15
+
16
class WheelDistribution(AbstractDistribution):
    """Represents a wheel distribution.

    Wheels can be directly unpacked, so no preparation step is needed.
    """

    @property
    def build_tracker_id(self) -> Optional[str]:
        # Nothing is ever built for a wheel, so there is no tracker entry.
        return None

    def get_metadata_distribution(self) -> BaseDistribution:
        """Loads the metadata from the wheel file into memory and returns a
        Distribution that uses it, not relying on the wheel file or
        requirement.
        """
        assert self.req.local_file_path, "Set as part of preparation during download"
        assert self.req.name, "Wheels are never unnamed"
        fs_wheel = FilesystemWheel(self.req.local_file_path)
        return get_wheel_distribution(fs_wheel, canonicalize_name(self.req.name))

    def prepare_distribution_metadata(
        self,
        finder: "PackageFinder",
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        # The wheel already carries its metadata; nothing to prepare.
        pass
vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (238 Bytes). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc ADDED
Binary file (1.21 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc ADDED
Binary file (7.38 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc ADDED
Binary file (2.71 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc ADDED
Binary file (18.7 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc ADDED
Binary file (909 Bytes). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc ADDED
Binary file (3.45 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc ADDED
Binary file (1.67 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc ADDED
Binary file (3.79 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc ADDED
Binary file (5.07 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/models/direct_url.py ADDED
@@ -0,0 +1,224 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ PEP 610 """
2
+
3
+ import json
4
+ import re
5
+ import urllib.parse
6
+ from dataclasses import dataclass
7
+ from typing import Any, ClassVar, Dict, Iterable, Optional, Type, TypeVar, Union
8
+
9
__all__ = [
    "DirectUrl",
    "DirectUrlValidationError",
    "DirInfo",
    "ArchiveInfo",
    "VcsInfo",
]

T = TypeVar("T")

# File name of the PEP 610 metadata document inside a distribution.
DIRECT_URL_METADATA_NAME = "direct_url.json"
# Matches "${VAR}" or "${VAR}:${PASS}" credential placeholders (PEP 610).
ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")


class DirectUrlValidationError(Exception):
    """Raised when a direct_url.json document fails validation."""


def _get(
    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
) -> Optional[T]:
    """Get value from dictionary and verify expected type."""
    try:
        value = d[key]
    except KeyError:
        return default
    if not isinstance(value, expected_type):
        raise DirectUrlValidationError(
            f"{value!r} has unexpected type for {key} (expected {expected_type})"
        )
    return value


def _get_required(
    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
) -> T:
    """Like :func:`_get`, but a missing (None) result is an error."""
    value = _get(d, expected_type, key, default)
    if value is None:
        raise DirectUrlValidationError(f"{key} must have a value")
    return value


def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":
    """Return the single non-None entry of *infos*, validating cardinality."""
    present = [info for info in infos if info is not None]
    if not present:
        raise DirectUrlValidationError(
            "missing one of archive_info, dir_info, vcs_info"
        )
    if len(present) > 1:
        raise DirectUrlValidationError(
            "more than one of archive_info, dir_info, vcs_info"
        )
    assert present[0] is not None
    return present[0]


def _filter_none(**kwargs: Any) -> Dict[str, Any]:
    """Make a dict from the keyword arguments, excluding None values."""
    return {key: value for key, value in kwargs.items() if value is not None}


@dataclass
class VcsInfo:
    """PEP 610 ``vcs_info``: a version-control checkout."""

    name: ClassVar = "vcs_info"

    vcs: str
    commit_id: str
    requested_revision: Optional[str] = None

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
        if d is None:
            return None
        return cls(
            vcs=_get_required(d, str, "vcs"),
            commit_id=_get_required(d, str, "commit_id"),
            requested_revision=_get(d, str, "requested_revision"),
        )

    def _to_dict(self) -> Dict[str, Any]:
        return _filter_none(
            vcs=self.vcs,
            requested_revision=self.requested_revision,
            commit_id=self.commit_id,
        )


class ArchiveInfo:
    """PEP 610 ``archive_info``: a downloaded archive, with optional hashes."""

    name = "archive_info"

    def __init__(
        self,
        hash: Optional[str] = None,
        hashes: Optional[Dict[str, str]] = None,
    ) -> None:
        # hashes must be assigned before hash: the hash setter folds the
        # legacy single-hash value into the hashes mapping.
        self.hashes = hashes
        self.hash = hash

    @property
    def hash(self) -> Optional[str]:
        # Legacy "<name>=<value>" form, kept for backward compatibility.
        return self._hash

    @hash.setter
    def hash(self, value: Optional[str]) -> None:
        if value is not None:
            # Auto-populate the hashes key to upgrade to the new format
            # automatically.  The legacy hash key is never back-populated
            # from hashes.
            try:
                hash_name, hash_value = value.split("=", 1)
            except ValueError:
                raise DirectUrlValidationError(
                    f"invalid archive_info.hash format: {value!r}"
                )
            if self.hashes is None:
                self.hashes = {hash_name: hash_value}
            elif hash_name not in self.hashes:
                # Copy before mutating so a caller-supplied dict is untouched.
                self.hashes = self.hashes.copy()
                self.hashes[hash_name] = hash_value
        self._hash = value

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
        if d is None:
            return None
        return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes"))

    def _to_dict(self) -> Dict[str, Any]:
        return _filter_none(hash=self.hash, hashes=self.hashes)


@dataclass
class DirInfo:
    """PEP 610 ``dir_info``: a local directory, possibly editable."""

    name: ClassVar = "dir_info"

    editable: bool = False

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
        if d is None:
            return None
        return cls(editable=_get_required(d, bool, "editable", default=False))

    def _to_dict(self) -> Dict[str, Any]:
        # "editable or None" drops the key entirely when editable is False.
        return _filter_none(editable=self.editable or None)


InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]


@dataclass
class DirectUrl:
    """In-memory form of a PEP 610 direct_url.json document."""

    url: str
    info: InfoType
    subdirectory: Optional[str] = None

    def _remove_auth_from_netloc(self, netloc: str) -> str:
        # Strip "user:password@" unless it is the conventional "git" user of
        # a git URL, or an environment-variable placeholder per PEP 610.
        if "@" not in netloc:
            return netloc
        user_pass, host = netloc.split("@", 1)
        is_git_user = (
            isinstance(self.info, VcsInfo)
            and self.info.vcs == "git"
            and user_pass == "git"
        )
        if is_git_user or ENV_VAR_RE.match(user_pass):
            return netloc
        return host

    @property
    def redacted_url(self) -> str:
        """url with user:password part removed unless it is formed with
        environment variables as specified in PEP 610, or it is ``git``
        in the case of a git URL.
        """
        parts = urllib.parse.urlsplit(self.url)
        clean_netloc = self._remove_auth_from_netloc(parts.netloc)
        return urllib.parse.urlunsplit(
            (parts.scheme, clean_netloc, parts.path, parts.query, parts.fragment)
        )

    def validate(self) -> None:
        # Round-tripping through the dict form exercises every type check.
        self.from_dict(self.to_dict())

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
        return DirectUrl(
            url=_get_required(d, str, "url"),
            subdirectory=_get(d, str, "subdirectory"),
            info=_exactly_one_of(
                [
                    ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
                    DirInfo._from_dict(_get(d, dict, "dir_info")),
                    VcsInfo._from_dict(_get(d, dict, "vcs_info")),
                ]
            ),
        )

    def to_dict(self) -> Dict[str, Any]:
        res = _filter_none(
            url=self.redacted_url,
            subdirectory=self.subdirectory,
        )
        res[self.info.name] = self.info._to_dict()
        return res

    @classmethod
    def from_json(cls, s: str) -> "DirectUrl":
        return cls.from_dict(json.loads(s))

    def to_json(self) -> str:
        return json.dumps(self.to_dict(), sort_keys=True)

    def is_local_editable(self) -> bool:
        return isinstance(self.info, DirInfo) and self.info.editable
vllm/lib/python3.10/site-packages/pip/_internal/models/link.py ADDED
@@ -0,0 +1,604 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import itertools
3
+ import logging
4
+ import os
5
+ import posixpath
6
+ import re
7
+ import urllib.parse
8
+ from dataclasses import dataclass
9
+ from typing import (
10
+ TYPE_CHECKING,
11
+ Any,
12
+ Dict,
13
+ List,
14
+ Mapping,
15
+ NamedTuple,
16
+ Optional,
17
+ Tuple,
18
+ Union,
19
+ )
20
+
21
+ from pip._internal.utils.deprecation import deprecated
22
+ from pip._internal.utils.filetypes import WHEEL_EXTENSION
23
+ from pip._internal.utils.hashes import Hashes
24
+ from pip._internal.utils.misc import (
25
+ pairwise,
26
+ redact_auth_from_url,
27
+ split_auth_from_netloc,
28
+ splitext,
29
+ )
30
+ from pip._internal.utils.urls import path_to_url, url_to_path
31
+
32
+ if TYPE_CHECKING:
33
+ from pip._internal.index.collector import IndexContent
34
+
35
+ logger = logging.getLogger(__name__)
36
+
37
+
38
+ # Order matters, earlier hashes have a precedence over later hashes for what
39
+ # we will pick to use.
40
+ _SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
41
+
42
+
43
+ @dataclass(frozen=True)
44
+ class LinkHash:
45
+ """Links to content may have embedded hash values. This class parses those.
46
+
47
+ `name` must be any member of `_SUPPORTED_HASHES`.
48
+
49
+ This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to
50
+ be JSON-serializable to conform to PEP 610, this class contains the logic for
51
+ parsing a hash name and value for correctness, and then checking whether that hash
52
+ conforms to a schema with `.is_hash_allowed()`."""
53
+
54
+ name: str
55
+ value: str
56
+
57
+ _hash_url_fragment_re = re.compile(
58
+ # NB: we do not validate that the second group (.*) is a valid hex
59
+ # digest. Instead, we simply keep that string in this class, and then check it
60
+ # against Hashes when hash-checking is needed. This is easier to debug than
61
+ # proactively discarding an invalid hex digest, as we handle incorrect hashes
62
+ # and malformed hashes in the same place.
63
+ r"[#&]({choices})=([^&]*)".format(
64
+ choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
65
+ ),
66
+ )
67
+
68
+ def __post_init__(self) -> None:
69
+ assert self.name in _SUPPORTED_HASHES
70
+
71
+ @classmethod
72
+ @functools.lru_cache(maxsize=None)
73
+ def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
74
+ """Search a string for a checksum algorithm name and encoded output value."""
75
+ match = cls._hash_url_fragment_re.search(url)
76
+ if match is None:
77
+ return None
78
+ name, value = match.groups()
79
+ return cls(name=name, value=value)
80
+
81
+ def as_dict(self) -> Dict[str, str]:
82
+ return {self.name: self.value}
83
+
84
+ def as_hashes(self) -> Hashes:
85
+ """Return a Hashes instance which checks only for the current hash."""
86
+ return Hashes({self.name: [self.value]})
87
+
88
+ def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
89
+ """
90
+ Return True if the current hash is allowed by `hashes`.
91
+ """
92
+ if hashes is None:
93
+ return False
94
+ return hashes.is_hash_allowed(self.name, hex_digest=self.value)
95
+
96
+
97
+ @dataclass(frozen=True)
98
+ class MetadataFile:
99
+ """Information about a core metadata file associated with a distribution."""
100
+
101
+ hashes: Optional[Dict[str, str]]
102
+
103
+ def __post_init__(self) -> None:
104
+ if self.hashes is not None:
105
+ assert all(name in _SUPPORTED_HASHES for name in self.hashes)
106
+
107
+
108
+ def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
109
+ # Remove any unsupported hash types from the mapping. If this leaves no
110
+ # supported hashes, return None
111
+ if hashes is None:
112
+ return None
113
+ hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
114
+ if not hashes:
115
+ return None
116
+ return hashes
117
+
118
+
119
+ def _clean_url_path_part(part: str) -> str:
120
+ """
121
+ Clean a "part" of a URL path (i.e. after splitting on "@" characters).
122
+ """
123
+ # We unquote prior to quoting to make sure nothing is double quoted.
124
+ return urllib.parse.quote(urllib.parse.unquote(part))
125
+
126
+
127
+ def _clean_file_url_path(part: str) -> str:
128
+ """
129
+ Clean the first part of a URL path that corresponds to a local
130
+ filesystem path (i.e. the first part after splitting on "@" characters).
131
+ """
132
+ # We unquote prior to quoting to make sure nothing is double quoted.
133
+ # Also, on Windows the path part might contain a drive letter which
134
+ # should not be quoted. On Linux where drive letters do not
135
+ # exist, the colon should be quoted. We rely on urllib.request
136
+ # to do the right thing here.
137
+ return urllib.request.pathname2url(urllib.request.url2pathname(part))
138
+
139
+
140
# Characters whose position in the path must be preserved while cleaning:
# "@" (VCS revision separator) and the percent-encoded "/" (%2F), matched
# case-insensitively. Used as a split pattern with a capturing group so the
# separators themselves are retained.
# percent-encoded: /
_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
142
+
143
+
144
def _clean_url_path(path: str, is_local_path: bool) -> str:
    """
    Clean the path portion of a URL.

    Local filesystem paths get drive-letter-aware handling; remote paths
    get plain percent-(re)quoting.
    """
    clean_func = _clean_file_url_path if is_local_path else _clean_url_path_part

    # Split on the reserved characters prior to cleaning so that
    # revision strings in VCS URLs are properly preserved.
    parts = _reserved_chars_re.split(path)

    # Walk (text, separator) pairs; the trailing "" pads the final text
    # segment so the last pair is complete. Separators are uppercased to
    # normalize %xx escapes (e.g. %2f -> %2F).
    padded = itertools.chain(parts, [""])
    pieces = [
        piece
        for to_clean, reserved in pairwise(padded)
        for piece in (clean_func(to_clean), reserved.upper())
    ]
    return "".join(pieces)
164
+
165
+
166
def _ensure_quoted_url(url: str) -> str:
    """
    Make sure a link is fully quoted.
    For example, if ' ' occurs in the URL, it will be replaced with "%20",
    and without double-quoting other characters.
    """
    # Break the URL into `scheme://netloc/path?query#fragment` parts and
    # re-quote only the path component.
    split = urllib.parse.urlsplit(url)
    # An empty netloc means the URL refers to a local filesystem path.
    cleaned_path = _clean_url_path(split.path, is_local_path=not split.netloc)
    return urllib.parse.urlunsplit(split._replace(path=cleaned_path))
179
+
180
+
181
+ def _absolute_link_url(base_url: str, url: str) -> str:
182
+ """
183
+ A faster implementation of urllib.parse.urljoin with a shortcut
184
+ for absolute http/https URLs.
185
+ """
186
+ if url.startswith(("https://", "http://")):
187
+ return url
188
+ else:
189
+ return urllib.parse.urljoin(base_url, url)
190
+
191
+
192
@functools.total_ordering
class Link:
    """Represents a parsed link from a Package Index's simple URL"""

    # __slots__ keeps per-instance memory low; a single resolve can create
    # many thousands of Link objects.
    __slots__ = [
        "_parsed_url",
        "_url",
        "_path",
        "_hashes",
        "comes_from",
        "requires_python",
        "yanked_reason",
        "metadata_file_data",
        "cache_link_parsing",
        "egg_fragment",
    ]

    def __init__(
        self,
        url: str,
        comes_from: Optional[Union[str, "IndexContent"]] = None,
        requires_python: Optional[str] = None,
        yanked_reason: Optional[str] = None,
        metadata_file_data: Optional[MetadataFile] = None,
        cache_link_parsing: bool = True,
        hashes: Optional[Mapping[str, str]] = None,
    ) -> None:
        """
        :param url: url of the resource pointed to (href of the link)
        :param comes_from: instance of IndexContent where the link was found,
            or string.
        :param requires_python: String containing the `Requires-Python`
            metadata field, specified in PEP 345. This may be specified by
            a data-requires-python attribute in the HTML link tag, as
            described in PEP 503.
        :param yanked_reason: the reason the file has been yanked, if the
            file has been yanked, or None if the file hasn't been yanked.
            This is the value of the "data-yanked" attribute, if present, in
            a simple repository HTML link. If the file has been yanked but
            no reason was provided, this should be the empty string. See
            PEP 592 for more information and the specification.
        :param metadata_file_data: the metadata attached to the file, or None if
            no such metadata is provided. This argument, if not None, indicates
            that a separate metadata file exists, and also optionally supplies
            hashes for that file.
        :param cache_link_parsing: A flag that is used elsewhere to determine
            whether resources retrieved from this link should be cached. PyPI
            URLs should generally have this set to False, for example.
        :param hashes: A mapping of hash names to digests to allow us to
            determine the validity of a download.
        """

        # The comes_from, requires_python, and metadata_file_data arguments are
        # only used by classmethods of this class, and are not used in client
        # code directly.

        # url can be a UNC windows share
        if url.startswith("\\\\"):
            url = path_to_url(url)

        self._parsed_url = urllib.parse.urlsplit(url)
        # Store the url as a private attribute to prevent accidentally
        # trying to set a new value.
        self._url = url
        # The .path property is hot, so calculate its value ahead of time.
        self._path = urllib.parse.unquote(self._parsed_url.path)

        # A hash embedded in the URL fragment takes precedence over one
        # supplied via the `hashes` argument (it is merged in last).
        link_hash = LinkHash.find_hash_url_fragment(url)
        hashes_from_link = {} if link_hash is None else link_hash.as_dict()
        if hashes is None:
            self._hashes = hashes_from_link
        else:
            self._hashes = {**hashes, **hashes_from_link}

        self.comes_from = comes_from
        self.requires_python = requires_python if requires_python else None
        self.yanked_reason = yanked_reason
        self.metadata_file_data = metadata_file_data

        self.cache_link_parsing = cache_link_parsing
        # Pre-computed because the fragment lookup is a regex search.
        self.egg_fragment = self._egg_fragment()

    @classmethod
    def from_json(
        cls,
        file_data: Dict[str, Any],
        page_url: str,
    ) -> Optional["Link"]:
        """
        Convert an pypi json document from a simple repository page into a Link.

        Returns None when the entry carries no "url" key.
        """
        file_url = file_data.get("url")
        if file_url is None:
            return None

        url = _ensure_quoted_url(_absolute_link_url(page_url, file_url))
        pyrequire = file_data.get("requires-python")
        yanked_reason = file_data.get("yanked")
        hashes = file_data.get("hashes", {})

        # PEP 714: Indexes must use the name core-metadata, but
        # clients should support the old name as a fallback for compatibility.
        metadata_info = file_data.get("core-metadata")
        if metadata_info is None:
            metadata_info = file_data.get("dist-info-metadata")

        # The metadata info value may be a boolean, or a dict of hashes.
        if isinstance(metadata_info, dict):
            # The file exists, and hashes have been supplied
            metadata_file_data = MetadataFile(supported_hashes(metadata_info))
        elif metadata_info:
            # The file exists, but there are no hashes
            metadata_file_data = MetadataFile(None)
        else:
            # False or not present: the file does not exist
            metadata_file_data = None

        # The Link.yanked_reason expects an empty string instead of a boolean.
        if yanked_reason and not isinstance(yanked_reason, str):
            yanked_reason = ""
        # The Link.yanked_reason expects None instead of False.
        elif not yanked_reason:
            yanked_reason = None

        return cls(
            url,
            comes_from=page_url,
            requires_python=pyrequire,
            yanked_reason=yanked_reason,
            hashes=hashes,
            metadata_file_data=metadata_file_data,
        )

    @classmethod
    def from_element(
        cls,
        anchor_attribs: Dict[str, Optional[str]],
        page_url: str,
        base_url: str,
    ) -> Optional["Link"]:
        """
        Convert an anchor element's attributes in a simple repository page to a Link.

        Returns None when the anchor has no (or an empty) href.
        """
        href = anchor_attribs.get("href")
        if not href:
            return None

        url = _ensure_quoted_url(_absolute_link_url(base_url, href))
        pyrequire = anchor_attribs.get("data-requires-python")
        yanked_reason = anchor_attribs.get("data-yanked")

        # PEP 714: Indexes must use the name data-core-metadata, but
        # clients should support the old name as a fallback for compatibility.
        metadata_info = anchor_attribs.get("data-core-metadata")
        if metadata_info is None:
            metadata_info = anchor_attribs.get("data-dist-info-metadata")
        # The metadata info value may be the string "true", or a string of
        # the form "hashname=hashval"
        if metadata_info == "true":
            # The file exists, but there are no hashes
            metadata_file_data = MetadataFile(None)
        elif metadata_info is None:
            # The file does not exist
            metadata_file_data = None
        else:
            # The file exists, and hashes have been supplied
            hashname, sep, hashval = metadata_info.partition("=")
            if sep == "=":
                metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
            else:
                # Error - data is wrong. Treat as no hashes supplied.
                logger.debug(
                    "Index returned invalid data-dist-info-metadata value: %s",
                    metadata_info,
                )
                metadata_file_data = MetadataFile(None)

        return cls(
            url,
            comes_from=page_url,
            requires_python=pyrequire,
            yanked_reason=yanked_reason,
            metadata_file_data=metadata_file_data,
        )

    def __str__(self) -> str:
        # Credentials are redacted so the string is safe to log.
        if self.requires_python:
            rp = f" (requires-python:{self.requires_python})"
        else:
            rp = ""
        if self.comes_from:
            return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
        else:
            return redact_auth_from_url(str(self._url))

    def __repr__(self) -> str:
        return f"<Link {self}>"

    # Equality, ordering and hashing are all based solely on the URL string.
    def __hash__(self) -> int:
        return hash(self.url)

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Link):
            return NotImplemented
        return self.url == other.url

    def __lt__(self, other: Any) -> bool:
        if not isinstance(other, Link):
            return NotImplemented
        return self.url < other.url

    @property
    def url(self) -> str:
        return self._url

    @property
    def filename(self) -> str:
        """Last path component of the URL, unquoted; falls back to the host."""
        path = self.path.rstrip("/")
        name = posixpath.basename(path)
        if not name:
            # Make sure we don't leak auth information if the netloc
            # includes a username and password.
            # (user_pass is intentionally discarded.)
            netloc, user_pass = split_auth_from_netloc(self.netloc)
            return netloc

        name = urllib.parse.unquote(name)
        assert name, f"URL {self._url!r} produced no filename"
        return name

    @property
    def file_path(self) -> str:
        # Only meaningful for file: URLs.
        return url_to_path(self.url)

    @property
    def scheme(self) -> str:
        return self._parsed_url.scheme

    @property
    def netloc(self) -> str:
        """
        This can contain auth information.
        """
        return self._parsed_url.netloc

    @property
    def path(self) -> str:
        # Pre-computed (unquoted) in __init__ because this property is hot.
        return self._path

    def splitext(self) -> Tuple[str, str]:
        """Return (root, extension) of the URL's final path component."""
        return splitext(posixpath.basename(self.path.rstrip("/")))

    @property
    def ext(self) -> str:
        return self.splitext()[1]

    @property
    def url_without_fragment(self) -> str:
        scheme, netloc, path, query, fragment = self._parsed_url
        return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))

    _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")

    # Per PEP 508.
    _project_name_re = re.compile(
        r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
    )

    def _egg_fragment(self) -> Optional[str]:
        """Extract the #egg= fragment value, warning on non-PEP 508 names."""
        match = self._egg_fragment_re.search(self._url)
        if not match:
            return None

        # An egg fragment looks like a PEP 508 project name, along with
        # an optional extras specifier. Anything else is invalid.
        project_name = match.group(1)
        if not self._project_name_re.match(project_name):
            # Invalid names are still returned, but deprecated.
            deprecated(
                reason=f"{self} contains an egg fragment with a non-PEP 508 name.",
                replacement="to use the req @ url syntax, and remove the egg fragment",
                gone_in="25.1",
                issue=13157,
            )

        return project_name

    _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")

    @property
    def subdirectory_fragment(self) -> Optional[str]:
        """First #subdirectory= fragment value, or None if absent."""
        match = self._subdirectory_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    def metadata_link(self) -> Optional["Link"]:
        """Return a link to the associated core metadata file (if any)."""
        if self.metadata_file_data is None:
            return None
        # PEP 658: the metadata file lives next to the distribution file,
        # with ".metadata" appended.
        metadata_url = f"{self.url_without_fragment}.metadata"
        if self.metadata_file_data.hashes is None:
            return Link(metadata_url)
        return Link(metadata_url, hashes=self.metadata_file_data.hashes)

    def as_hashes(self) -> Hashes:
        """Return all known hashes for this link as a Hashes instance."""
        return Hashes({k: [v] for k, v in self._hashes.items()})

    @property
    def hash(self) -> Optional[str]:
        # First known digest value (arbitrary but stable dict order), or None.
        return next(iter(self._hashes.values()), None)

    @property
    def hash_name(self) -> Optional[str]:
        # Algorithm name paired with `hash`, or None.
        return next(iter(self._hashes), None)

    @property
    def show_url(self) -> str:
        # Basename with query string and fragment stripped — display only.
        return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])

    @property
    def is_file(self) -> bool:
        return self.scheme == "file"

    def is_existing_dir(self) -> bool:
        return self.is_file and os.path.isdir(self.file_path)

    @property
    def is_wheel(self) -> bool:
        return self.ext == WHEEL_EXTENSION

    @property
    def is_vcs(self) -> bool:
        # Imported lazily to avoid a circular import with pip._internal.vcs.
        from pip._internal.vcs import vcs

        return self.scheme in vcs.all_schemes

    @property
    def is_yanked(self) -> bool:
        return self.yanked_reason is not None

    @property
    def has_hash(self) -> bool:
        return bool(self._hashes)

    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
        """
        Return True if the link has a hash and it is allowed by `hashes`.
        """
        if hashes is None:
            return False
        return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())
542
+
543
+
544
class _CleanResult(NamedTuple):
    """Convert link for equivalency check.

    This is used in the resolver to check whether two URL-specified requirements
    likely point to the same distribution and can be considered equivalent. This
    equivalency logic avoids comparing URLs literally, which can be too strict
    (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users.

    Currently this does three things:

    1. Drop the basic auth part. This is technically wrong since a server can
       serve different content based on auth, but if it does that, it is even
       impossible to guarantee two URLs without auth are equivalent, since
       the user can input different auth information when prompted. So the
       practical solution is to assume the auth doesn't affect the response.
    2. Parse the query to avoid the ordering issue. Note that ordering under the
       same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are
       still considered different.
    3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
       hash values, since it should have no impact the downloaded content. Note
       that this drops the "egg=" part historically used to denote the requested
       project (and extras), which is wrong in the strictest sense, but too many
       people are supplying it inconsistently to cause superfluous resolution
       conflicts, so we choose to also ignore them.
    """

    # URL with auth, query string, and fragment stripped.
    parsed: urllib.parse.SplitResult
    # Parsed query parameters, so key ordering does not affect equality.
    query: Dict[str, List[str]]
    # First subdirectory= fragment value, or "" when absent.
    subdirectory: str
    # Supported hash fragments: algorithm name -> first hex digest.
    hashes: Dict[str, str]
574
+
575
+
576
def _clean_link(link: Link) -> _CleanResult:
    """Normalize a Link into a _CleanResult for equivalency comparison."""
    parsed = link._parsed_url
    # Drop the basic-auth portion (everything up to the last "@").
    netloc = parsed.netloc.rsplit("@", 1)[-1]
    # According to RFC 8089, an empty host in file: means localhost.
    if parsed.scheme == "file" and not netloc:
        netloc = "localhost"
    fragment = urllib.parse.parse_qs(parsed.fragment)
    if "egg" in fragment:
        # egg= is deliberately ignored for equivalency; see _CleanResult.
        logger.debug("Ignoring egg= fragment in %s", link)
    try:
        # If there are multiple subdirectory values, use the first one.
        # This matches the behavior of Link.subdirectory_fragment.
        subdirectory = fragment["subdirectory"][0]
    except (IndexError, KeyError):
        subdirectory = ""
    # If there are multiple hash values under the same algorithm, use the
    # first one. This matches the behavior of Link.hash_value.
    hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment}
    return _CleanResult(
        parsed=parsed._replace(netloc=netloc, query="", fragment=""),
        query=urllib.parse.parse_qs(parsed.query),
        subdirectory=subdirectory,
        hashes=hashes,
    )
600
+
601
+
602
@functools.lru_cache(maxsize=None)
def links_equivalent(link1: Link, link2: Link) -> bool:
    """Return True if the two links likely point to the same distribution.

    Comparison is done on the normalized (_clean_link) forms, and the
    result is memoized since the resolver asks repeatedly.
    """
    cleaned_one = _clean_link(link1)
    cleaned_two = _clean_link(link2)
    return cleaned_one == cleaned_two
vllm/lib/python3.10/site-packages/pip/_internal/models/target_python.py ADDED
@@ -0,0 +1,121 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ from typing import List, Optional, Set, Tuple
3
+
4
+ from pip._vendor.packaging.tags import Tag
5
+
6
+ from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot
7
+ from pip._internal.utils.misc import normalize_version_info
8
+
9
+
10
class TargetPython:
    """
    Encapsulates the properties of a Python interpreter one is targeting
    for a package install, download, etc.
    """

    __slots__ = [
        "_given_py_version_info",
        "abis",
        "implementation",
        "platforms",
        "py_version",
        "py_version_info",
        "_valid_tags",
        "_valid_tags_set",
    ]

    def __init__(
        self,
        platforms: Optional[List[str]] = None,
        py_version_info: Optional[Tuple[int, ...]] = None,
        abis: Optional[List[str]] = None,
        implementation: Optional[str] = None,
    ) -> None:
        """
        :param platforms: A list of strings or None. If None, searches for
            packages that are supported by the current system. Otherwise, will
            find packages that can be built on the platforms passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param py_version_info: An optional tuple of ints representing the
            Python version information to use (e.g. `sys.version_info[:3]`).
            This can have length 1, 2, or 3 when provided.
        :param abis: A list of strings or None. This is passed to
            compatibility_tags.py's get_supported() function as is.
        :param implementation: A string or None. This is passed to
            compatibility_tags.py's get_supported() function as is.
        """
        # Store the given py_version_info for when we call get_supported().
        self._given_py_version_info = py_version_info

        if py_version_info is None:
            py_version_info = sys.version_info[:3]
        else:
            # Presumably normalizes the 1-3 element tuple to exactly three
            # components — confirm against utils.misc.normalize_version_info.
            py_version_info = normalize_version_info(py_version_info)

        # "major.minor" string, e.g. "3.10".
        py_version = ".".join(map(str, py_version_info[:2]))

        self.abis = abis
        self.implementation = implementation
        self.platforms = platforms
        self.py_version = py_version
        self.py_version_info = py_version_info

        # This is used to cache the return value of get_(un)sorted_tags.
        self._valid_tags: Optional[List[Tag]] = None
        self._valid_tags_set: Optional[Set[Tag]] = None

    def format_given(self) -> str:
        """
        Format the given, non-None attributes for display.
        """
        display_version = None
        if self._given_py_version_info is not None:
            display_version = ".".join(
                str(part) for part in self._given_py_version_info
            )

        key_values = [
            ("platforms", self.platforms),
            ("version_info", display_version),
            ("abis", self.abis),
            ("implementation", self.implementation),
        ]
        return " ".join(
            f"{key}={value!r}" for key, value in key_values if value is not None
        )

    def get_sorted_tags(self) -> List[Tag]:
        """
        Return the supported PEP 425 tags to check wheel candidates against.

        The tags are returned in order of preference (most preferred first).
        The result is computed once and cached on the instance.
        """
        if self._valid_tags is None:
            # Pass versions=None if no py_version_info was given since
            # versions=None uses special default logic.
            py_version_info = self._given_py_version_info
            if py_version_info is None:
                version = None
            else:
                version = version_info_to_nodot(py_version_info)

            tags = get_supported(
                version=version,
                platforms=self.platforms,
                abis=self.abis,
                impl=self.implementation,
            )
            self._valid_tags = tags

        return self._valid_tags

    def get_unsorted_tags(self) -> Set[Tag]:
        """Exactly the same as get_sorted_tags, but returns a set.

        This is important for performance.
        """
        if self._valid_tags_set is None:
            self._valid_tags_set = set(self.get_sorted_tags())

        return self._valid_tags_set
vllm/lib/python3.10/site-packages/pip/_internal/operations/__init__.py ADDED
File without changes
vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc ADDED
Binary file (4.79 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-310.pyc ADDED
Binary file (6.32 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc ADDED
Binary file (15.7 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__init__.py ADDED
File without changes
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (180 Bytes). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-310.pyc ADDED
Binary file (4.95 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-310.pyc ADDED
Binary file (1.41 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-310.pyc ADDED
Binary file (1.46 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc ADDED
Binary file (2.34 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc ADDED
Binary file (1.2 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc ADDED
Binary file (1.43 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-310.pyc ADDED
Binary file (2.71 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/build_tracker.py ADDED
@@ -0,0 +1,138 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+ import hashlib
3
+ import logging
4
+ import os
5
+ from types import TracebackType
6
+ from typing import Dict, Generator, Optional, Type, Union
7
+
8
+ from pip._internal.req.req_install import InstallRequirement
9
+ from pip._internal.utils.temp_dir import TempDirectory
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
@contextlib.contextmanager
def update_env_context_manager(**changes: str) -> Generator[None, None, None]:
    """Temporarily apply `changes` to os.environ, restoring state on exit.

    Variables that did not exist beforehand are deleted again; variables
    that did exist get their original values back — even if the managed
    body raises.
    """
    target = os.environ

    # Sentinel distinguishing "was unset" from any real string value.
    _unset = object()
    previous: Dict[str, Union[object, str]] = {
        name: target.get(name, _unset) for name in changes
    }
    target.update(changes)

    try:
        yield
    finally:
        # Restore original values in the target.
        for name, old_value in previous.items():
            if old_value is _unset:
                del target[name]
            else:
                assert isinstance(old_value, str)  # for mypy
                target[name] = old_value
38
+
39
+
40
@contextlib.contextmanager
def get_build_tracker() -> Generator["BuildTracker", None, None]:
    """Yield a BuildTracker rooted at $PIP_BUILD_TRACKER.

    If the variable is unset, a temporary root is created and exported into
    the environment so that pip subprocesses share the same tracker root;
    both the tempdir and the env change are undone on exit via ExitStack.
    """
    root = os.environ.get("PIP_BUILD_TRACKER")
    with contextlib.ExitStack() as ctx:
        if root is None:
            root = ctx.enter_context(TempDirectory(kind="build-tracker")).path
            ctx.enter_context(update_env_context_manager(PIP_BUILD_TRACKER=root))
            logger.debug("Initialized build tracking at %s", root)

        with BuildTracker(root) as tracker:
            yield tracker
51
+
52
+
53
# A distinct str subclass so type checkers can tell tracker keys apart
# from arbitrary strings.
class TrackerId(str):
    """Uniquely identifying string provided to the build tracker."""
55
+
56
+
57
class BuildTracker:
    """Ensure that an sdist cannot request itself as a setup requirement.

    When an sdist is prepared, it identifies its setup requirements in the
    context of ``BuildTracker.track()``. If a requirement shows up recursively, this
    raises an exception.

    This stops fork bombs embedded in malicious packages."""

    def __init__(self, root: str) -> None:
        # Directory holding one marker file per in-progress build; shared
        # across pip subprocesses via $PIP_BUILD_TRACKER.
        self._root = root
        # In-memory record of the marker files created by this tracker.
        self._entries: Dict[TrackerId, InstallRequirement] = {}
        logger.debug("Created build tracker: %s", self._root)

    def __enter__(self) -> "BuildTracker":
        logger.debug("Entered build tracker: %s", self._root)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Sweep up this tracker's marker files, even when exiting on error.
        self.cleanup()

    def _entry_path(self, key: TrackerId) -> str:
        # Hash the key so arbitrary requirement strings map to valid,
        # fixed-length file names under the tracker root.
        hashed = hashlib.sha224(key.encode()).hexdigest()
        return os.path.join(self._root, hashed)

    def add(self, req: InstallRequirement, key: TrackerId) -> None:
        """Add an InstallRequirement to build tracking.

        :raises LookupError: if a build for `key` is already in progress
            (its marker file already exists).
        """

        # Get the file to write information about this requirement.
        entry_path = self._entry_path(key)

        # Try reading from the file. If it exists and can be read from, a build
        # is already in progress, so a LookupError is raised.
        try:
            with open(entry_path) as fp:
                contents = fp.read()
        except FileNotFoundError:
            pass
        else:
            message = f"{req.link} is already being built: {contents}"
            raise LookupError(message)

        # If we're here, req should really not be building already.
        assert key not in self._entries

        # Start tracking this requirement.
        with open(entry_path, "w", encoding="utf-8") as fp:
            fp.write(str(req))
        self._entries[key] = req

        logger.debug("Added %s to build tracker %r", req, self._root)

    def remove(self, req: InstallRequirement, key: TrackerId) -> None:
        """Remove an InstallRequirement from build tracking."""

        # Delete the created file and the corresponding entry.
        os.unlink(self._entry_path(key))
        del self._entries[key]

        logger.debug("Removed %s from build tracker %r", req, self._root)

    def cleanup(self) -> None:
        # Iterate over a copy since remove() mutates self._entries.
        for key, req in list(self._entries.items()):
            self.remove(req, key)

        logger.debug("Removed build tracker: %r", self._root)

    @contextlib.contextmanager
    def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]:
        """Ensure that `key` cannot install itself as a setup requirement.

        :raises LookupError: If `key` was already provided in a parent invocation of
            the context introduced by this method."""
        # NOTE(review): there is no try/finally here, so if the tracked
        # build raises, the marker stays in place until cleanup() sweeps
        # it on tracker exit — confirm this is intentional before changing.
        tracker_id = TrackerId(key)
        self.add(req, tracker_id)
        yield
        self.remove(req, tracker_id)
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Metadata generation logic for source distributions.
2
+ """
3
+
4
+ import os
5
+
6
+ from pip._vendor.pyproject_hooks import BuildBackendHookCaller
7
+
8
+ from pip._internal.build_env import BuildEnvironment
9
+ from pip._internal.exceptions import (
10
+ InstallationSubprocessError,
11
+ MetadataGenerationFailed,
12
+ )
13
+ from pip._internal.utils.subprocess import runner_with_spinner_message
14
+ from pip._internal.utils.temp_dir import TempDirectory
15
+
16
+
17
def generate_metadata(
    build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 517.

    :param build_env: isolated build environment the hook runs inside.
    :param backend: hook caller for the project's build backend.
    :param details: human-readable package description for error reporting.
    :raises MetadataGenerationFailed: when the backend hook subprocess fails.

    Returns the generated metadata directory.
    """
    # globally_managed=True defers cleanup to pip's global tempdir
    # teardown, since the metadata is consumed later in the install.
    metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)

    metadata_dir = metadata_tmpdir.path

    with build_env:
        # Note that BuildBackendHookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so we don't have to
        # consider the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")
        with backend.subprocess_runner(runner):
            try:
                distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
            except InstallationSubprocessError as error:
                # Chain the subprocess failure for diagnostics.
                raise MetadataGenerationFailed(package_details=details) from error

    return os.path.join(metadata_dir, distinfo_dir)
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Metadata generation logic for source distributions.
2
+ """
3
+
4
+ import os
5
+
6
+ from pip._vendor.pyproject_hooks import BuildBackendHookCaller
7
+
8
+ from pip._internal.build_env import BuildEnvironment
9
+ from pip._internal.exceptions import (
10
+ InstallationSubprocessError,
11
+ MetadataGenerationFailed,
12
+ )
13
+ from pip._internal.utils.subprocess import runner_with_spinner_message
14
+ from pip._internal.utils.temp_dir import TempDirectory
15
+
16
+
17
def generate_editable_metadata(
    build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 660.

    :param build_env: isolated build environment the hook runs inside.
    :param backend: hook caller for the project's build backend.
    :param details: human-readable package description for error reporting.
    :raises MetadataGenerationFailed: when the backend hook subprocess fails.

    Returns the generated metadata directory.
    """
    # globally_managed=True defers cleanup to pip's global tempdir
    # teardown, since the metadata is consumed later in the install.
    metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)

    metadata_dir = metadata_tmpdir.path

    with build_env:
        # Note that BuildBackendHookCaller implements a fallback for
        # prepare_metadata_for_build_wheel/editable, so we don't have to
        # consider the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message(
            "Preparing editable metadata (pyproject.toml)"
        )
        with backend.subprocess_runner(runner):
            try:
                distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir)
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    # The editable hook may return None per PEP 660 when unsupported; the
    # fallback above guarantees a directory name here.
    assert distinfo_dir is not None
    return os.path.join(metadata_dir, distinfo_dir)
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Metadata generation logic for legacy source distributions.
2
+ """
3
+
4
+ import logging
5
+ import os
6
+
7
+ from pip._internal.build_env import BuildEnvironment
8
+ from pip._internal.cli.spinners import open_spinner
9
+ from pip._internal.exceptions import (
10
+ InstallationError,
11
+ InstallationSubprocessError,
12
+ MetadataGenerationFailed,
13
+ )
14
+ from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
15
+ from pip._internal.utils.subprocess import call_subprocess
16
+ from pip._internal.utils.temp_dir import TempDirectory
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
def _find_egg_info(directory: str) -> str:
    """Find an .egg-info subdirectory in `directory`.

    Raises InstallationError when zero or more than one candidate exists.
    """
    candidates = [
        entry for entry in os.listdir(directory) if entry.endswith(".egg-info")
    ]

    if not candidates:
        raise InstallationError(f"No .egg-info directory found in {directory}")

    if len(candidates) > 1:
        raise InstallationError(
            f"More than one .egg-info directory found in {directory}"
        )

    return os.path.join(directory, candidates[0])
34
+
35
+
36
def generate_metadata(
    build_env: BuildEnvironment,
    setup_py_path: str,
    source_dir: str,
    isolated: bool,
    details: str,
) -> str:
    """Generate metadata using setup.py-based defacto mechanisms.

    Returns the generated metadata directory (the .egg-info directory).
    """
    logger.debug(
        "Running setup.py (path:%s) egg_info for package %s",
        setup_py_path,
        details,
    )

    # Keep the egg-info out of the source tree; the temp dir is cleaned up
    # globally when pip exits.
    output_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path

    command = make_setuptools_egg_info_args(
        setup_py_path,
        egg_info_dir=output_dir,
        no_user_config=isolated,
    )

    with build_env, open_spinner("Preparing metadata (setup.py)") as spinner:
        try:
            call_subprocess(
                command,
                cwd=source_dir,
                command_desc="python setup.py egg_info",
                spinner=spinner,
            )
        except InstallationSubprocessError as exc:
            # Surface the failure with the package details attached.
            raise MetadataGenerationFailed(package_details=details) from exc

    # Locate and return the .egg-info directory that egg_info produced.
    return _find_egg_info(output_dir)
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ from typing import Optional
4
+
5
+ from pip._vendor.pyproject_hooks import BuildBackendHookCaller
6
+
7
+ from pip._internal.utils.subprocess import runner_with_spinner_message
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
def build_wheel_pep517(
    name: str,
    backend: BuildBackendHookCaller,
    metadata_directory: str,
    tempd: str,
) -> Optional[str]:
    """Build one InstallRequirement using the PEP 517 build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    try:
        logger.debug("Destination directory: %s", tempd)

        hook_runner = runner_with_spinner_message(
            f"Building wheel for {name} (pyproject.toml)"
        )
        with backend.subprocess_runner(hook_runner):
            built_name = backend.build_wheel(
                tempd,
                metadata_directory=metadata_directory,
            )
    except Exception:
        # Any backend failure is logged and reported as "no wheel".
        logger.error("Failed building wheel for %s", name)
        return None
    return os.path.join(tempd, built_name)
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_editable.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ from typing import Optional
4
+
5
+ from pip._vendor.pyproject_hooks import BuildBackendHookCaller, HookMissing
6
+
7
+ from pip._internal.utils.subprocess import runner_with_spinner_message
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
def build_wheel_editable(
    name: str,
    backend: BuildBackendHookCaller,
    metadata_directory: str,
    tempd: str,
) -> Optional[str]:
    """Build one InstallRequirement using the PEP 660 build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    try:
        logger.debug("Destination directory: %s", tempd)

        hook_runner = runner_with_spinner_message(
            f"Building editable for {name} (pyproject.toml)"
        )
        with backend.subprocess_runner(hook_runner):
            try:
                built_name = backend.build_editable(
                    tempd,
                    metadata_directory=metadata_directory,
                )
            except HookMissing as missing:
                # PEP 660 hooks are optional; report which one is absent.
                logger.error(
                    "Cannot build editable %s because the build "
                    "backend does not have the %s hook",
                    name,
                    missing,
                )
                return None
    except Exception:
        # Any other backend failure is logged and reported as "no wheel".
        logger.error("Failed building editable for %s", name)
        return None
    return os.path.join(tempd, built_name)
vllm/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os.path
3
+ from typing import List, Optional
4
+
5
+ from pip._internal.cli.spinners import open_spinner
6
+ from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
7
+ from pip._internal.utils.subprocess import call_subprocess, format_command_args
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
def format_command_result(
    command_args: List[str],
    command_output: str,
) -> str:
    """Format command information for logging."""
    command_desc = format_command_args(command_args)

    # Decide how much of the output to show based on verbosity.
    if not command_output:
        output_part = "Command output: None"
    elif logger.getEffectiveLevel() > logging.DEBUG:
        output_part = "Command output: [use --verbose to show]"
    else:
        if not command_output.endswith("\n"):
            command_output += "\n"
        output_part = f"Command output:\n{command_output}"

    return f"Command arguments: {command_desc}\n" + output_part
30
+
31
+
32
def get_legacy_build_wheel_path(
    names: List[str],
    temp_dir: str,
    name: str,
    command_args: List[str],
    command_output: str,
) -> Optional[str]:
    """Return the path to the wheel in the temporary build directory."""
    # Sort so the choice among multiple files is deterministic.
    names = sorted(names)

    if not names:
        warning = f"Legacy build of wheel for {name!r} created no files.\n"
        warning += format_command_result(command_args, command_output)
        logger.warning(warning)
        return None

    if len(names) > 1:
        warning = (
            f"Legacy build of wheel for {name!r} created more than one file.\n"
            f"Filenames (choosing first): {names}\n"
        )
        warning += format_command_result(command_args, command_output)
        logger.warning(warning)

    return os.path.join(temp_dir, names[0])
57
+
58
+
59
def build_wheel_legacy(
    name: str,
    setup_py_path: str,
    source_dir: str,
    global_options: List[str],
    build_options: List[str],
    tempd: str,
) -> Optional[str]:
    """Build one unpacked package using the "legacy" build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    bdist_args = make_setuptools_bdist_wheel_args(
        setup_py_path,
        global_options=global_options,
        build_options=build_options,
        destination_dir=tempd,
    )

    with open_spinner(f"Building wheel for {name} (setup.py)") as spinner:
        logger.debug("Destination directory: %s", tempd)

        try:
            build_output = call_subprocess(
                bdist_args,
                command_desc="python setup.py bdist_wheel",
                cwd=source_dir,
                spinner=spinner,
            )
        except Exception:
            spinner.finish("error")
            logger.error("Failed building wheel for %s", name)
            return None

        # Pick the produced wheel (if any) out of the destination directory.
        return get_legacy_build_wheel_path(
            names=os.listdir(tempd),
            temp_dir=tempd,
            name=name,
            command_args=bdist_args,
            command_output=build_output,
        )
vllm/lib/python3.10/site-packages/pip/_internal/operations/check.py ADDED
@@ -0,0 +1,181 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Validation of dependencies of packages
2
+ """
3
+
4
+ import logging
5
+ from contextlib import suppress
6
+ from email.parser import Parser
7
+ from functools import reduce
8
+ from typing import (
9
+ Callable,
10
+ Dict,
11
+ FrozenSet,
12
+ Generator,
13
+ Iterable,
14
+ List,
15
+ NamedTuple,
16
+ Optional,
17
+ Set,
18
+ Tuple,
19
+ )
20
+
21
+ from pip._vendor.packaging.requirements import Requirement
22
+ from pip._vendor.packaging.tags import Tag, parse_tag
23
+ from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
24
+ from pip._vendor.packaging.version import Version
25
+
26
+ from pip._internal.distributions import make_distribution_for_install_requirement
27
+ from pip._internal.metadata import get_default_environment
28
+ from pip._internal.metadata.base import BaseDistribution
29
+ from pip._internal.req.req_install import InstallRequirement
30
+
31
+ logger = logging.getLogger(__name__)
32
+
33
+
34
class PackageDetails(NamedTuple):
    """Version and declared dependencies of one package in a PackageSet."""

    # Version of the installed (or would-be-installed) package.
    version: Version
    # Direct requirements declared by the package's metadata.
    dependencies: List[Requirement]


# Shorthands
PackageSet = Dict[NormalizedName, PackageDetails]
# (canonical name of the missing dependency, the requirement that wanted it)
Missing = Tuple[NormalizedName, Requirement]
# (canonical name, version actually present, the requirement it violates)
Conflicting = Tuple[NormalizedName, Version, Requirement]

# Both dicts are keyed by the *dependent* package's canonical name.
MissingDict = Dict[NormalizedName, List[Missing]]
ConflictingDict = Dict[NormalizedName, List[Conflicting]]
CheckResult = Tuple[MissingDict, ConflictingDict]
# (the package set that was checked, the problems found in it)
ConflictDetails = Tuple[PackageSet, CheckResult]
48
+
49
+
50
def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
    """Build a PackageSet from the installed distributions.

    Returns the set together with a flag that is True when at least one
    distribution's metadata could not be parsed.
    """
    package_set: PackageSet = {}
    problems = False
    env = get_default_environment()
    for dist in env.iter_installed_distributions(local_only=False, skip=()):
        name = dist.canonical_name
        try:
            deps = list(dist.iter_dependencies())
            package_set[name] = PackageDetails(dist.version, deps)
        except (OSError, ValueError) as exc:
            # Don't crash on unreadable or broken metadata.
            logger.warning("Error parsing dependencies of %s: %s", name, exc)
            problems = True
    return package_set, problems
65
+
66
+
67
def check_package_set(
    package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
) -> CheckResult:
    """Check if a package set is consistent.

    If should_ignore is passed, it should be a callable that takes a
    package name and returns a boolean.
    """
    missing = {}
    conflicting = {}

    for pkg_name, details in package_set.items():
        if should_ignore and should_ignore(pkg_name):
            continue

        # Problems found among pkg_name's direct dependencies.
        not_found: Set[Missing] = set()
        mismatched: Set[Conflicting] = set()

        for req in details.dependencies:
            dep_name = canonicalize_name(req.name)

            if dep_name not in package_set:
                # An absent dependency only counts when its environment
                # marker (if any) applies in this environment.
                applies = True
                if req.marker is not None:
                    applies = req.marker.evaluate({"extra": ""})
                if applies:
                    not_found.add((dep_name, req))
                continue

            # Present: verify the installed version satisfies the specifier.
            present_version = package_set[dep_name].version
            if not req.specifier.contains(present_version, prereleases=True):
                mismatched.add((dep_name, present_version, req))

        if not_found:
            missing[pkg_name] = sorted(not_found, key=str)
        if mismatched:
            conflicting[pkg_name] = sorted(mismatched, key=str)

    return missing, conflicting
110
+
111
+
112
def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
    """Check whether the dependency graph would stay consistent after
    installing the given requirements.
    """
    # Start from what is installed right now.
    package_set, _ = create_package_set_from_installed()
    # Overlay the candidate installs on top of it.
    installed_names = _simulate_installation_of(to_install, package_set)

    # Restrict the report to packages being installed and their direct
    # reverse dependencies.
    relevant = _create_whitelist(installed_names, package_set)

    result = check_package_set(
        package_set, should_ignore=lambda name: name not in relevant
    )
    return (package_set, result)
130
+
131
+
132
def check_unsupported(
    packages: Iterable[BaseDistribution],
    supported_tags: Iterable[Tag],
) -> Generator[BaseDistribution, None, None]:
    """Yield packages whose recorded wheel tags match none of supported_tags.

    Packages without a readable WHEEL file are skipped entirely.
    """
    for dist in packages:
        with suppress(FileNotFoundError):
            wheel_metadata = dist.read_text("WHEEL")
            tag_headers = Parser().parsestr(wheel_metadata).get_all("Tag", [])
            # Union of every tag set declared in the WHEEL file.
            dist_tags: FrozenSet[Tag] = reduce(
                frozenset.union, map(parse_tag, tag_headers), frozenset()
            )
            if dist_tags.isdisjoint(supported_tags):
                yield dist
146
+
147
+
148
def _simulate_installation_of(
    to_install: List[InstallRequirement], package_set: PackageSet
) -> Set[NormalizedName]:
    """Overlay the to-be-installed requirements onto *package_set*.

    Mutates *package_set* in place and returns the canonical names of the
    packages that were (virtually) installed.
    """
    installed: Set[NormalizedName] = set()

    for inst_req in to_install:
        abstract = make_distribution_for_install_requirement(inst_req)
        dist = abstract.get_metadata_distribution()
        canonical = dist.canonical_name
        package_set[canonical] = PackageDetails(
            dist.version, list(dist.iter_dependencies())
        )
        installed.add(canonical)

    return installed
165
+
166
+
167
def _create_whitelist(
    would_be_installed: Set[NormalizedName], package_set: PackageSet
) -> Set[NormalizedName]:
    """Return would_be_installed plus any package that directly depends on
    one of its members (checked in package_set iteration order).
    """
    affected = set(would_be_installed)

    for candidate in package_set:
        if candidate in affected:
            continue

        # NOTE: `affected` grows during iteration, so packages added earlier
        # in this loop can pull in later candidates — same as the original.
        for req in package_set[candidate].dependencies:
            if canonicalize_name(req.name) in affected:
                affected.add(candidate)
                break

    return affected
vllm/lib/python3.10/site-packages/pip/_internal/operations/freeze.py ADDED
@@ -0,0 +1,256 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import collections
2
+ import logging
3
+ import os
4
+ from dataclasses import dataclass, field
5
+ from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set
6
+
7
+ from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
8
+ from pip._vendor.packaging.version import InvalidVersion
9
+
10
+ from pip._internal.exceptions import BadCommand, InstallationError
11
+ from pip._internal.metadata import BaseDistribution, get_environment
12
+ from pip._internal.req.constructors import (
13
+ install_req_from_editable,
14
+ install_req_from_line,
15
+ )
16
+ from pip._internal.req.req_file import COMMENT_RE
17
+ from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference
18
+
19
+ logger = logging.getLogger(__name__)
20
+
21
+
22
class _EditableInfo(NamedTuple):
    """Requirement text and comment lines for one editable distribution."""

    # The requirement string to emit (a VCS requirement when available,
    # otherwise the project's location on disk).
    requirement: str
    # Comment lines to emit before the requirement line.
    comments: List[str]
25
+
26
+
27
def freeze(
    requirement: Optional[List[str]] = None,
    local_only: bool = False,
    user_only: bool = False,
    paths: Optional[List[str]] = None,
    isolated: bool = False,
    exclude_editable: bool = False,
    skip: Container[str] = (),
) -> Generator[str, None, None]:
    """Yield the lines of a ``pip freeze`` report.

    When *requirement* files are given, their option/requirement lines are
    echoed first (in file order), and the remaining installed distributions
    follow under a marker comment.  *skip* holds canonical names that are
    never emitted in that trailing section.
    """
    # Installed distributions keyed by canonical name; entries are deleted
    # as they are emitted so the trailing section lists only the remainder.
    installations: Dict[str, FrozenRequirement] = {}

    dists = get_environment(paths).iter_installed_distributions(
        local_only=local_only,
        skip=(),
        user_only=user_only,
    )
    for dist in dists:
        req = FrozenRequirement.from_dist(dist)
        if exclude_editable and req.editable:
            continue
        installations[req.canonical_name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options: Set[str] = set()
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files: Dict[str, List[str]] = collections.defaultdict(list)
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    if (
                        not line.strip()
                        or line.strip().startswith("#")
                        or line.startswith(
                            (
                                "-r",
                                "--requirement",
                                "-f",
                                "--find-links",
                                "-i",
                                "--index-url",
                                "--pre",
                                "--trusted-host",
                                "--process-dependency-links",
                                "--extra-index-url",
                                "--use-feature",
                            )
                        )
                    ):
                        # Pass blank lines, comments and global options
                        # through, each distinct line only once.
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    if line.startswith("-e") or line.startswith("--editable"):
                        if line.startswith("-e"):
                            line = line[2:].strip()
                        else:
                            line = line[len("--editable") :].strip().lstrip("=")
                        line_req = install_req_from_editable(
                            line,
                            isolated=isolated,
                        )
                    else:
                        line_req = install_req_from_line(
                            COMMENT_RE.sub("", line).strip(),
                            isolated=isolated,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path,
                            line.strip(),
                        )
                        logger.info(
                            "  (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    else:
                        line_req_canonical_name = canonicalize_name(line_req.name)
                        if line_req_canonical_name not in installations:
                            # either it's not installed, or it is installed
                            # but has been processed already
                            if not req_files[line_req.name]:
                                logger.warning(
                                    "Requirement file [%s] contains %s, but "
                                    "package %r is not installed",
                                    req_file_path,
                                    COMMENT_RE.sub("", line).strip(),
                                    line_req.name,
                                )
                            else:
                                req_files[line_req.name].append(req_file_path)
                        else:
                            yield str(installations[line_req_canonical_name]).rstrip()
                            del installations[line_req_canonical_name]
                            req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in req_files.items():
            if len(files) > 1:
                logger.warning(
                    "Requirement %s included multiple times [%s]",
                    name,
                    ", ".join(sorted(set(files))),
                )

        yield ("## The following requirements were added by pip freeze:")
    for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
        if installation.canonical_name not in skip:
            yield str(installation).rstrip()
146
+
147
+
148
def _format_as_name_version(dist: BaseDistribution) -> str:
    """Render ``name==version``, or ``name===raw`` for unparseable versions."""
    try:
        parsed_version = dist.version
    except InvalidVersion:
        # Legacy version: pin with the arbitrary-equality operator.
        return f"{dist.raw_name}==={dist.raw_version}"
    return f"{dist.raw_name}=={parsed_version}"
156
+
157
+
158
def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
    """
    Compute and return values (req, comments) for use in
    FrozenRequirement.from_dist().
    """
    editable_project_location = dist.editable_project_location
    assert editable_project_location
    location = os.path.normcase(os.path.abspath(editable_project_location))

    # Local import — presumably to avoid a module-level import cycle with
    # the vcs package; TODO confirm.
    from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs

    vcs_backend = vcs.get_backend_for_dir(location)

    if vcs_backend is None:
        # Not under version control: fall back to the bare location.
        display = _format_as_name_version(dist)
        logger.debug(
            'No VCS found for editable requirement "%s" in: %r',
            display,
            location,
        )
        return _EditableInfo(
            requirement=location,
            comments=[f"# Editable install with no version control ({display})"],
        )

    vcs_name = type(vcs_backend).__name__

    try:
        req = vcs_backend.get_src_requirement(location, dist.raw_name)
    except RemoteNotFoundError:
        # Repository exists but has no remote: emit the local path.
        display = _format_as_name_version(dist)
        return _EditableInfo(
            requirement=location,
            comments=[f"# Editable {vcs_name} install with no remote ({display})"],
        )
    except RemoteNotValidError as ex:
        display = _format_as_name_version(dist)
        return _EditableInfo(
            requirement=location,
            comments=[
                f"# Editable {vcs_name} install ({display}) with either a deleted "
                f"local remote or invalid URI:",
                f"# '{ex.url}'",
            ],
        )
    except BadCommand:
        # The VCS executable is missing from PATH; no VCS requirement
        # can be produced, so fall back to the bare location.
        logger.warning(
            "cannot determine version of editable source in %s "
            "(%s command not found in path)",
            location,
            vcs_backend.name,
        )
        return _EditableInfo(requirement=location, comments=[])
    except InstallationError as exc:
        logger.warning("Error when trying to get requirement for VCS system %s", exc)
    else:
        return _EditableInfo(requirement=req, comments=[])

    # Reached only via the InstallationError branch above.
    logger.warning("Could not determine repository location of %s", location)

    return _EditableInfo(
        requirement=location,
        comments=["## !! Could not determine repository location"],
    )
222
+
223
+
224
@dataclass(frozen=True)
class FrozenRequirement:
    """One entry of ``pip freeze`` output plus its preceding comment lines."""

    name: str
    req: str
    editable: bool
    comments: Iterable[str] = field(default_factory=tuple)

    @property
    def canonical_name(self) -> NormalizedName:
        # Normalized form, used for membership tests against skip sets.
        return canonicalize_name(self.name)

    @classmethod
    def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
        """Build a FrozenRequirement describing an installed distribution."""
        is_editable = dist.editable
        if is_editable:
            req_string, notes = _get_editable_info(dist)
        else:
            notes = []
            durl = dist.direct_url
            if durl:
                # if PEP 610 metadata is present, use it
                req_string = direct_url_as_pep440_direct_reference(
                    durl, dist.raw_name
                )
            else:
                # name==version requirement
                req_string = _format_as_name_version(dist)

        return cls(dist.raw_name, req_string, is_editable, comments=notes)

    def __str__(self) -> str:
        line = f"-e {self.req}" if self.editable else self.req
        return "\n".join([*self.comments, str(line)]) + "\n"
vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ """For modules related to installing packages.
2
+ """
vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (238 Bytes). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc ADDED
Binary file (1.47 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-310.pyc ADDED
Binary file (21.5 kB). View file
 
vllm/lib/python3.10/site-packages/pip/_internal/operations/install/editable_legacy.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Legacy editable installation process, i.e. `setup.py develop`.
2
+ """
3
+
4
+ import logging
5
+ from typing import Optional, Sequence
6
+
7
+ from pip._internal.build_env import BuildEnvironment
8
+ from pip._internal.utils.logging import indent_log
9
+ from pip._internal.utils.setuptools_build import make_setuptools_develop_args
10
+ from pip._internal.utils.subprocess import call_subprocess
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
def install_editable(
    *,
    global_options: Sequence[str],
    prefix: Optional[str],
    home: Optional[str],
    use_user_site: bool,
    name: str,
    setup_py_path: str,
    isolated: bool,
    build_env: BuildEnvironment,
    unpacked_source_directory: str,
) -> None:
    """Install a package in editable mode. Most arguments are pass-through
    to setuptools.
    """
    logger.info("Running setup.py develop for %s", name)

    develop_args = make_setuptools_develop_args(
        setup_py_path,
        global_options=global_options,
        no_user_config=isolated,
        prefix=prefix,
        home=home,
        use_user_site=use_user_site,
    )

    # Enter the build environment before invoking setup.py develop.
    with indent_log(), build_env:
        call_subprocess(
            develop_args,
            command_desc="python setup.py develop",
            cwd=unpacked_source_directory,
        )