koichi12 commited on
Commit
19160d9
·
verified ·
1 Parent(s): 2614c3c

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .venv/lib/python3.11/site-packages/cffi-1.17.1.dist-info/LICENSE +26 -0
  2. .venv/lib/python3.11/site-packages/cffi-1.17.1.dist-info/METADATA +40 -0
  3. .venv/lib/python3.11/site-packages/cffi-1.17.1.dist-info/WHEEL +6 -0
  4. .venv/lib/python3.11/site-packages/pip/__init__.py +13 -0
  5. .venv/lib/python3.11/site-packages/pip/__main__.py +24 -0
  6. .venv/lib/python3.11/site-packages/pip/__pip-runner__.py +50 -0
  7. .venv/lib/python3.11/site-packages/pip/__pycache__/__init__.cpython-311.pyc +0 -0
  8. .venv/lib/python3.11/site-packages/pip/__pycache__/__main__.cpython-311.pyc +0 -0
  9. .venv/lib/python3.11/site-packages/pip/__pycache__/__pip-runner__.cpython-311.pyc +0 -0
  10. .venv/lib/python3.11/site-packages/pip/_internal/__init__.py +18 -0
  11. .venv/lib/python3.11/site-packages/pip/_internal/build_env.py +311 -0
  12. .venv/lib/python3.11/site-packages/pip/_internal/cache.py +290 -0
  13. .venv/lib/python3.11/site-packages/pip/_internal/configuration.py +383 -0
  14. .venv/lib/python3.11/site-packages/pip/_internal/exceptions.py +728 -0
  15. .venv/lib/python3.11/site-packages/pip/_internal/index/__init__.py +2 -0
  16. .venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc +0 -0
  17. .venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc +0 -0
  18. .venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc +0 -0
  19. .venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc +0 -0
  20. .venv/lib/python3.11/site-packages/pip/_internal/index/collector.py +507 -0
  21. .venv/lib/python3.11/site-packages/pip/_internal/index/package_finder.py +1027 -0
  22. .venv/lib/python3.11/site-packages/pip/_internal/index/sources.py +285 -0
  23. .venv/lib/python3.11/site-packages/pip/_internal/main.py +12 -0
  24. .venv/lib/python3.11/site-packages/pip/_internal/operations/__init__.py +0 -0
  25. .venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc +0 -0
  26. .venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc +0 -0
  27. .venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-311.pyc +0 -0
  28. .venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc +0 -0
  29. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/__init__.py +0 -0
  30. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc +0 -0
  31. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc +0 -0
  32. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc +0 -0
  33. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc +0 -0
  34. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc +0 -0
  35. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc +0 -0
  36. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc +0 -0
  37. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc +0 -0
  38. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/build_tracker.py +139 -0
  39. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata.py +39 -0
  40. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata_editable.py +41 -0
  41. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata_legacy.py +74 -0
  42. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel.py +37 -0
  43. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel_editable.py +46 -0
  44. .venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel_legacy.py +102 -0
  45. .venv/lib/python3.11/site-packages/pip/_internal/operations/check.py +187 -0
  46. .venv/lib/python3.11/site-packages/pip/_internal/operations/freeze.py +255 -0
  47. .venv/lib/python3.11/site-packages/pip/_internal/operations/install/__init__.py +2 -0
  48. .venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc +0 -0
  49. .venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc +0 -0
  50. .venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc +0 -0
.venv/lib/python3.11/site-packages/cffi-1.17.1.dist-info/LICENSE ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ Except when otherwise stated (look for LICENSE files in directories or
3
+ information at the beginning of each file) all software and
4
+ documentation is licensed as follows:
5
+
6
+ The MIT License
7
+
8
+ Permission is hereby granted, free of charge, to any person
9
+ obtaining a copy of this software and associated documentation
10
+ files (the "Software"), to deal in the Software without
11
+ restriction, including without limitation the rights to use,
12
+ copy, modify, merge, publish, distribute, sublicense, and/or
13
+ sell copies of the Software, and to permit persons to whom the
14
+ Software is furnished to do so, subject to the following conditions:
15
+
16
+ The above copyright notice and this permission notice shall be included
17
+ in all copies or substantial portions of the Software.
18
+
19
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
20
+ OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
22
+ THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
24
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
25
+ DEALINGS IN THE SOFTWARE.
26
+
.venv/lib/python3.11/site-packages/cffi-1.17.1.dist-info/METADATA ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: cffi
3
+ Version: 1.17.1
4
+ Summary: Foreign Function Interface for Python calling C code.
5
+ Home-page: http://cffi.readthedocs.org
6
+ Author: Armin Rigo, Maciej Fijalkowski
7
+ Author-email: python-cffi@googlegroups.com
8
+ License: MIT
9
+ Project-URL: Documentation, http://cffi.readthedocs.org/
10
+ Project-URL: Source Code, https://github.com/python-cffi/cffi
11
+ Project-URL: Issue Tracker, https://github.com/python-cffi/cffi/issues
12
+ Project-URL: Changelog, https://cffi.readthedocs.io/en/latest/whatsnew.html
13
+ Project-URL: Downloads, https://github.com/python-cffi/cffi/releases
14
+ Project-URL: Contact, https://groups.google.com/forum/#!forum/python-cffi
15
+ Classifier: Programming Language :: Python
16
+ Classifier: Programming Language :: Python :: 3
17
+ Classifier: Programming Language :: Python :: 3.8
18
+ Classifier: Programming Language :: Python :: 3.9
19
+ Classifier: Programming Language :: Python :: 3.10
20
+ Classifier: Programming Language :: Python :: 3.11
21
+ Classifier: Programming Language :: Python :: 3.12
22
+ Classifier: Programming Language :: Python :: 3.13
23
+ Classifier: Programming Language :: Python :: Implementation :: CPython
24
+ Classifier: Programming Language :: Python :: Implementation :: PyPy
25
+ Classifier: License :: OSI Approved :: MIT License
26
+ Requires-Python: >=3.8
27
+ License-File: LICENSE
28
+ Requires-Dist: pycparser
29
+
30
+
31
+ CFFI
32
+ ====
33
+
34
+ Foreign Function Interface for Python calling C code.
35
+ Please see the `Documentation <http://cffi.readthedocs.org/>`_.
36
+
37
+ Contact
38
+ -------
39
+
40
+ `Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_
.venv/lib/python3.11/site-packages/cffi-1.17.1.dist-info/WHEEL ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (74.1.1)
3
+ Root-Is-Purelib: false
4
+ Tag: cp311-cp311-manylinux_2_17_x86_64
5
+ Tag: cp311-cp311-manylinux2014_x86_64
6
+
.venv/lib/python3.11/site-packages/pip/__init__.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import List, Optional
2
+
3
+ __version__ = "24.0"
4
+
5
+
6
+ def main(args: Optional[List[str]] = None) -> int:
7
+ """This is an internal API only meant for use by pip's own console scripts.
8
+
9
+ For additional details, see https://github.com/pypa/pip/issues/7498.
10
+ """
11
+ from pip._internal.utils.entrypoints import _wrapper
12
+
13
+ return _wrapper(args)
.venv/lib/python3.11/site-packages/pip/__main__.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+
4
+ # Remove '' and current working directory from the first entry
5
+ # of sys.path, if present to avoid using current directory
6
+ # in pip commands check, freeze, install, list and show,
7
+ # when invoked as python -m pip <command>
8
+ if sys.path[0] in ("", os.getcwd()):
9
+ sys.path.pop(0)
10
+
11
+ # If we are running from a wheel, add the wheel to sys.path
12
+ # This allows the usage python pip-*.whl/pip install pip-*.whl
13
+ if __package__ == "":
14
+ # __file__ is pip-*.whl/pip/__main__.py
15
+ # first dirname call strips of '/__main__.py', second strips off '/pip'
16
+ # Resulting path is the name of the wheel itself
17
+ # Add that to sys.path so we can import pip
18
+ path = os.path.dirname(os.path.dirname(__file__))
19
+ sys.path.insert(0, path)
20
+
21
+ if __name__ == "__main__":
22
+ from pip._internal.cli.main import main as _main
23
+
24
+ sys.exit(_main())
.venv/lib/python3.11/site-packages/pip/__pip-runner__.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Execute exactly this copy of pip, within a different environment.
2
+
3
+ This file is named as it is, to ensure that this module can't be imported via
4
+ an import statement.
5
+ """
6
+
7
+ # /!\ This version compatibility check section must be Python 2 compatible. /!\
8
+
9
+ import sys
10
+
11
+ # Copied from setup.py
12
+ PYTHON_REQUIRES = (3, 7)
13
+
14
+
15
+ def version_str(version): # type: ignore
16
+ return ".".join(str(v) for v in version)
17
+
18
+
19
+ if sys.version_info[:2] < PYTHON_REQUIRES:
20
+ raise SystemExit(
21
+ "This version of pip does not support python {} (requires >={}).".format(
22
+ version_str(sys.version_info[:2]), version_str(PYTHON_REQUIRES)
23
+ )
24
+ )
25
+
26
+ # From here on, we can use Python 3 features, but the syntax must remain
27
+ # Python 2 compatible.
28
+
29
+ import runpy # noqa: E402
30
+ from importlib.machinery import PathFinder # noqa: E402
31
+ from os.path import dirname # noqa: E402
32
+
33
+ PIP_SOURCES_ROOT = dirname(dirname(__file__))
34
+
35
+
36
+ class PipImportRedirectingFinder:
37
+ @classmethod
38
+ def find_spec(self, fullname, path=None, target=None): # type: ignore
39
+ if fullname != "pip":
40
+ return None
41
+
42
+ spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target)
43
+ assert spec, (PIP_SOURCES_ROOT, fullname)
44
+ return spec
45
+
46
+
47
+ sys.meta_path.insert(0, PipImportRedirectingFinder())
48
+
49
+ assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module"
50
+ runpy.run_module("pip", run_name="__main__", alter_sys=True)
.venv/lib/python3.11/site-packages/pip/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (763 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/__pycache__/__main__.cpython-311.pyc ADDED
Binary file (882 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/__pycache__/__pip-runner__.cpython-311.pyc ADDED
Binary file (2.5 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/__init__.py ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import List, Optional
2
+
3
+ from pip._internal.utils import _log
4
+
5
+ # init_logging() must be called before any call to logging.getLogger()
6
+ # which happens at import of most modules.
7
+ _log.init_logging()
8
+
9
+
10
+ def main(args: (Optional[List[str]]) = None) -> int:
11
+ """This is preserved for old console scripts that may still be referencing
12
+ it.
13
+
14
+ For additional details, see https://github.com/pypa/pip/issues/7498.
15
+ """
16
+ from pip._internal.utils.entrypoints import _wrapper
17
+
18
+ return _wrapper(args)
.venv/lib/python3.11/site-packages/pip/_internal/build_env.py ADDED
@@ -0,0 +1,311 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Build Environment used for isolation during sdist building
2
+ """
3
+
4
+ import logging
5
+ import os
6
+ import pathlib
7
+ import site
8
+ import sys
9
+ import textwrap
10
+ from collections import OrderedDict
11
+ from types import TracebackType
12
+ from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
13
+
14
+ from pip._vendor.certifi import where
15
+ from pip._vendor.packaging.requirements import Requirement
16
+ from pip._vendor.packaging.version import Version
17
+
18
+ from pip import __file__ as pip_location
19
+ from pip._internal.cli.spinners import open_spinner
20
+ from pip._internal.locations import get_platlib, get_purelib, get_scheme
21
+ from pip._internal.metadata import get_default_environment, get_environment
22
+ from pip._internal.utils.subprocess import call_subprocess
23
+ from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
24
+
25
+ if TYPE_CHECKING:
26
+ from pip._internal.index.package_finder import PackageFinder
27
+
28
+ logger = logging.getLogger(__name__)
29
+
30
+
31
+ def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
32
+ return (a, b) if a != b else (a,)
33
+
34
+
35
+ class _Prefix:
36
+ def __init__(self, path: str) -> None:
37
+ self.path = path
38
+ self.setup = False
39
+ scheme = get_scheme("", prefix=path)
40
+ self.bin_dir = scheme.scripts
41
+ self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)
42
+
43
+
44
+ def get_runnable_pip() -> str:
45
+ """Get a file to pass to a Python executable, to run the currently-running pip.
46
+
47
+ This is used to run a pip subprocess, for installing requirements into the build
48
+ environment.
49
+ """
50
+ source = pathlib.Path(pip_location).resolve().parent
51
+
52
+ if not source.is_dir():
53
+ # This would happen if someone is using pip from inside a zip file. In that
54
+ # case, we can use that directly.
55
+ return str(source)
56
+
57
+ return os.fsdecode(source / "__pip-runner__.py")
58
+
59
+
60
+ def _get_system_sitepackages() -> Set[str]:
61
+ """Get system site packages
62
+
63
+ Usually from site.getsitepackages,
64
+ but fallback on `get_purelib()/get_platlib()` if unavailable
65
+ (e.g. in a virtualenv created by virtualenv<20)
66
+
67
+ Returns normalized set of strings.
68
+ """
69
+ if hasattr(site, "getsitepackages"):
70
+ system_sites = site.getsitepackages()
71
+ else:
72
+ # virtualenv < 20 overwrites site.py without getsitepackages
73
+ # fallback on get_purelib/get_platlib.
74
+ # this is known to miss things, but shouldn't in the cases
75
+ # where getsitepackages() has been removed (inside a virtualenv)
76
+ system_sites = [get_purelib(), get_platlib()]
77
+ return {os.path.normcase(path) for path in system_sites}
78
+
79
+
80
+ class BuildEnvironment:
81
+ """Creates and manages an isolated environment to install build deps"""
82
+
83
+ def __init__(self) -> None:
84
+ temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
85
+
86
+ self._prefixes = OrderedDict(
87
+ (name, _Prefix(os.path.join(temp_dir.path, name)))
88
+ for name in ("normal", "overlay")
89
+ )
90
+
91
+ self._bin_dirs: List[str] = []
92
+ self._lib_dirs: List[str] = []
93
+ for prefix in reversed(list(self._prefixes.values())):
94
+ self._bin_dirs.append(prefix.bin_dir)
95
+ self._lib_dirs.extend(prefix.lib_dirs)
96
+
97
+ # Customize site to:
98
+ # - ensure .pth files are honored
99
+ # - prevent access to system site packages
100
+ system_sites = _get_system_sitepackages()
101
+
102
+ self._site_dir = os.path.join(temp_dir.path, "site")
103
+ if not os.path.exists(self._site_dir):
104
+ os.mkdir(self._site_dir)
105
+ with open(
106
+ os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
107
+ ) as fp:
108
+ fp.write(
109
+ textwrap.dedent(
110
+ """
111
+ import os, site, sys
112
+
113
+ # First, drop system-sites related paths.
114
+ original_sys_path = sys.path[:]
115
+ known_paths = set()
116
+ for path in {system_sites!r}:
117
+ site.addsitedir(path, known_paths=known_paths)
118
+ system_paths = set(
119
+ os.path.normcase(path)
120
+ for path in sys.path[len(original_sys_path):]
121
+ )
122
+ original_sys_path = [
123
+ path for path in original_sys_path
124
+ if os.path.normcase(path) not in system_paths
125
+ ]
126
+ sys.path = original_sys_path
127
+
128
+ # Second, add lib directories.
129
+ # ensuring .pth file are processed.
130
+ for path in {lib_dirs!r}:
131
+ assert not path in sys.path
132
+ site.addsitedir(path)
133
+ """
134
+ ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
135
+ )
136
+
137
+ def __enter__(self) -> None:
138
+ self._save_env = {
139
+ name: os.environ.get(name, None)
140
+ for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
141
+ }
142
+
143
+ path = self._bin_dirs[:]
144
+ old_path = self._save_env["PATH"]
145
+ if old_path:
146
+ path.extend(old_path.split(os.pathsep))
147
+
148
+ pythonpath = [self._site_dir]
149
+
150
+ os.environ.update(
151
+ {
152
+ "PATH": os.pathsep.join(path),
153
+ "PYTHONNOUSERSITE": "1",
154
+ "PYTHONPATH": os.pathsep.join(pythonpath),
155
+ }
156
+ )
157
+
158
+ def __exit__(
159
+ self,
160
+ exc_type: Optional[Type[BaseException]],
161
+ exc_val: Optional[BaseException],
162
+ exc_tb: Optional[TracebackType],
163
+ ) -> None:
164
+ for varname, old_value in self._save_env.items():
165
+ if old_value is None:
166
+ os.environ.pop(varname, None)
167
+ else:
168
+ os.environ[varname] = old_value
169
+
170
+ def check_requirements(
171
+ self, reqs: Iterable[str]
172
+ ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
173
+ """Return 2 sets:
174
+ - conflicting requirements: set of (installed, wanted) reqs tuples
175
+ - missing requirements: set of reqs
176
+ """
177
+ missing = set()
178
+ conflicting = set()
179
+ if reqs:
180
+ env = (
181
+ get_environment(self._lib_dirs)
182
+ if hasattr(self, "_lib_dirs")
183
+ else get_default_environment()
184
+ )
185
+ for req_str in reqs:
186
+ req = Requirement(req_str)
187
+ # We're explicitly evaluating with an empty extra value, since build
188
+ # environments are not provided any mechanism to select specific extras.
189
+ if req.marker is not None and not req.marker.evaluate({"extra": ""}):
190
+ continue
191
+ dist = env.get_distribution(req.name)
192
+ if not dist:
193
+ missing.add(req_str)
194
+ continue
195
+ if isinstance(dist.version, Version):
196
+ installed_req_str = f"{req.name}=={dist.version}"
197
+ else:
198
+ installed_req_str = f"{req.name}==={dist.version}"
199
+ if not req.specifier.contains(dist.version, prereleases=True):
200
+ conflicting.add((installed_req_str, req_str))
201
+ # FIXME: Consider direct URL?
202
+ return conflicting, missing
203
+
204
+ def install_requirements(
205
+ self,
206
+ finder: "PackageFinder",
207
+ requirements: Iterable[str],
208
+ prefix_as_string: str,
209
+ *,
210
+ kind: str,
211
+ ) -> None:
212
+ prefix = self._prefixes[prefix_as_string]
213
+ assert not prefix.setup
214
+ prefix.setup = True
215
+ if not requirements:
216
+ return
217
+ self._install_requirements(
218
+ get_runnable_pip(),
219
+ finder,
220
+ requirements,
221
+ prefix,
222
+ kind=kind,
223
+ )
224
+
225
+ @staticmethod
226
+ def _install_requirements(
227
+ pip_runnable: str,
228
+ finder: "PackageFinder",
229
+ requirements: Iterable[str],
230
+ prefix: _Prefix,
231
+ *,
232
+ kind: str,
233
+ ) -> None:
234
+ args: List[str] = [
235
+ sys.executable,
236
+ pip_runnable,
237
+ "install",
238
+ "--ignore-installed",
239
+ "--no-user",
240
+ "--prefix",
241
+ prefix.path,
242
+ "--no-warn-script-location",
243
+ ]
244
+ if logger.getEffectiveLevel() <= logging.DEBUG:
245
+ args.append("-v")
246
+ for format_control in ("no_binary", "only_binary"):
247
+ formats = getattr(finder.format_control, format_control)
248
+ args.extend(
249
+ (
250
+ "--" + format_control.replace("_", "-"),
251
+ ",".join(sorted(formats or {":none:"})),
252
+ )
253
+ )
254
+
255
+ index_urls = finder.index_urls
256
+ if index_urls:
257
+ args.extend(["-i", index_urls[0]])
258
+ for extra_index in index_urls[1:]:
259
+ args.extend(["--extra-index-url", extra_index])
260
+ else:
261
+ args.append("--no-index")
262
+ for link in finder.find_links:
263
+ args.extend(["--find-links", link])
264
+
265
+ for host in finder.trusted_hosts:
266
+ args.extend(["--trusted-host", host])
267
+ if finder.allow_all_prereleases:
268
+ args.append("--pre")
269
+ if finder.prefer_binary:
270
+ args.append("--prefer-binary")
271
+ args.append("--")
272
+ args.extend(requirements)
273
+ extra_environ = {"_PIP_STANDALONE_CERT": where()}
274
+ with open_spinner(f"Installing {kind}") as spinner:
275
+ call_subprocess(
276
+ args,
277
+ command_desc=f"pip subprocess to install {kind}",
278
+ spinner=spinner,
279
+ extra_environ=extra_environ,
280
+ )
281
+
282
+
283
+ class NoOpBuildEnvironment(BuildEnvironment):
284
+ """A no-op drop-in replacement for BuildEnvironment"""
285
+
286
+ def __init__(self) -> None:
287
+ pass
288
+
289
+ def __enter__(self) -> None:
290
+ pass
291
+
292
+ def __exit__(
293
+ self,
294
+ exc_type: Optional[Type[BaseException]],
295
+ exc_val: Optional[BaseException],
296
+ exc_tb: Optional[TracebackType],
297
+ ) -> None:
298
+ pass
299
+
300
+ def cleanup(self) -> None:
301
+ pass
302
+
303
+ def install_requirements(
304
+ self,
305
+ finder: "PackageFinder",
306
+ requirements: Iterable[str],
307
+ prefix_as_string: str,
308
+ *,
309
+ kind: str,
310
+ ) -> None:
311
+ raise NotImplementedError()
.venv/lib/python3.11/site-packages/pip/_internal/cache.py ADDED
@@ -0,0 +1,290 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Cache Management
2
+ """
3
+
4
+ import hashlib
5
+ import json
6
+ import logging
7
+ import os
8
+ from pathlib import Path
9
+ from typing import Any, Dict, List, Optional
10
+
11
+ from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
12
+ from pip._vendor.packaging.utils import canonicalize_name
13
+
14
+ from pip._internal.exceptions import InvalidWheelFilename
15
+ from pip._internal.models.direct_url import DirectUrl
16
+ from pip._internal.models.link import Link
17
+ from pip._internal.models.wheel import Wheel
18
+ from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
19
+ from pip._internal.utils.urls import path_to_url
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+ ORIGIN_JSON_NAME = "origin.json"
24
+
25
+
26
+ def _hash_dict(d: Dict[str, str]) -> str:
27
+ """Return a stable sha224 of a dictionary."""
28
+ s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
29
+ return hashlib.sha224(s.encode("ascii")).hexdigest()
30
+
31
+
32
+ class Cache:
33
+ """An abstract class - provides cache directories for data from links
34
+
35
+ :param cache_dir: The root of the cache.
36
+ """
37
+
38
+ def __init__(self, cache_dir: str) -> None:
39
+ super().__init__()
40
+ assert not cache_dir or os.path.isabs(cache_dir)
41
+ self.cache_dir = cache_dir or None
42
+
43
+ def _get_cache_path_parts(self, link: Link) -> List[str]:
44
+ """Get parts of part that must be os.path.joined with cache_dir"""
45
+
46
+ # We want to generate an url to use as our cache key, we don't want to
47
+ # just re-use the URL because it might have other items in the fragment
48
+ # and we don't care about those.
49
+ key_parts = {"url": link.url_without_fragment}
50
+ if link.hash_name is not None and link.hash is not None:
51
+ key_parts[link.hash_name] = link.hash
52
+ if link.subdirectory_fragment:
53
+ key_parts["subdirectory"] = link.subdirectory_fragment
54
+
55
+ # Include interpreter name, major and minor version in cache key
56
+ # to cope with ill-behaved sdists that build a different wheel
57
+ # depending on the python version their setup.py is being run on,
58
+ # and don't encode the difference in compatibility tags.
59
+ # https://github.com/pypa/pip/issues/7296
60
+ key_parts["interpreter_name"] = interpreter_name()
61
+ key_parts["interpreter_version"] = interpreter_version()
62
+
63
+ # Encode our key url with sha224, we'll use this because it has similar
64
+ # security properties to sha256, but with a shorter total output (and
65
+ # thus less secure). However the differences don't make a lot of
66
+ # difference for our use case here.
67
+ hashed = _hash_dict(key_parts)
68
+
69
+ # We want to nest the directories some to prevent having a ton of top
70
+ # level directories where we might run out of sub directories on some
71
+ # FS.
72
+ parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
73
+
74
+ return parts
75
+
76
+ def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
77
+ can_not_cache = not self.cache_dir or not canonical_package_name or not link
78
+ if can_not_cache:
79
+ return []
80
+
81
+ path = self.get_path_for_link(link)
82
+ if os.path.isdir(path):
83
+ return [(candidate, path) for candidate in os.listdir(path)]
84
+ return []
85
+
86
+ def get_path_for_link(self, link: Link) -> str:
87
+ """Return a directory to store cached items in for link."""
88
+ raise NotImplementedError()
89
+
90
+ def get(
91
+ self,
92
+ link: Link,
93
+ package_name: Optional[str],
94
+ supported_tags: List[Tag],
95
+ ) -> Link:
96
+ """Returns a link to a cached item if it exists, otherwise returns the
97
+ passed link.
98
+ """
99
+ raise NotImplementedError()
100
+
101
+
102
+ class SimpleWheelCache(Cache):
103
+ """A cache of wheels for future installs."""
104
+
105
+ def __init__(self, cache_dir: str) -> None:
106
+ super().__init__(cache_dir)
107
+
108
+ def get_path_for_link(self, link: Link) -> str:
109
+ """Return a directory to store cached wheels for link
110
+
111
+ Because there are M wheels for any one sdist, we provide a directory
112
+ to cache them in, and then consult that directory when looking up
113
+ cache hits.
114
+
115
+ We only insert things into the cache if they have plausible version
116
+ numbers, so that we don't contaminate the cache with things that were
117
+ not unique. E.g. ./package might have dozens of installs done for it
118
+ and build a version of 0.0...and if we built and cached a wheel, we'd
119
+ end up using the same wheel even if the source has been edited.
120
+
121
+ :param link: The link of the sdist for which this will cache wheels.
122
+ """
123
+ parts = self._get_cache_path_parts(link)
124
+ assert self.cache_dir
125
+ # Store wheels within the root cache_dir
126
+ return os.path.join(self.cache_dir, "wheels", *parts)
127
+
128
+ def get(
129
+ self,
130
+ link: Link,
131
+ package_name: Optional[str],
132
+ supported_tags: List[Tag],
133
+ ) -> Link:
134
+ candidates = []
135
+
136
+ if not package_name:
137
+ return link
138
+
139
+ canonical_package_name = canonicalize_name(package_name)
140
+ for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
141
+ try:
142
+ wheel = Wheel(wheel_name)
143
+ except InvalidWheelFilename:
144
+ continue
145
+ if canonicalize_name(wheel.name) != canonical_package_name:
146
+ logger.debug(
147
+ "Ignoring cached wheel %s for %s as it "
148
+ "does not match the expected distribution name %s.",
149
+ wheel_name,
150
+ link,
151
+ package_name,
152
+ )
153
+ continue
154
+ if not wheel.supported(supported_tags):
155
+ # Built for a different python/arch/etc
156
+ continue
157
+ candidates.append(
158
+ (
159
+ wheel.support_index_min(supported_tags),
160
+ wheel_name,
161
+ wheel_dir,
162
+ )
163
+ )
164
+
165
+ if not candidates:
166
+ return link
167
+
168
+ _, wheel_name, wheel_dir = min(candidates)
169
+ return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
170
+
171
+
172
+ class EphemWheelCache(SimpleWheelCache):
173
+ """A SimpleWheelCache that creates it's own temporary cache directory"""
174
+
175
+ def __init__(self) -> None:
176
+ self._temp_dir = TempDirectory(
177
+ kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
178
+ globally_managed=True,
179
+ )
180
+
181
+ super().__init__(self._temp_dir.path)
182
+
183
+
184
+ class CacheEntry:
185
+ def __init__(
186
+ self,
187
+ link: Link,
188
+ persistent: bool,
189
+ ):
190
+ self.link = link
191
+ self.persistent = persistent
192
+ self.origin: Optional[DirectUrl] = None
193
+ origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
194
+ if origin_direct_url_path.exists():
195
+ try:
196
+ self.origin = DirectUrl.from_json(
197
+ origin_direct_url_path.read_text(encoding="utf-8")
198
+ )
199
+ except Exception as e:
200
+ logger.warning(
201
+ "Ignoring invalid cache entry origin file %s for %s (%s)",
202
+ origin_direct_url_path,
203
+ link.filename,
204
+ e,
205
+ )
206
+
207
+
208
+ class WheelCache(Cache):
209
+ """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
210
+
211
+ This Cache allows for gracefully degradation, using the ephem wheel cache
212
+ when a certain link is not found in the simple wheel cache first.
213
+ """
214
+
215
+ def __init__(self, cache_dir: str) -> None:
216
+ super().__init__(cache_dir)
217
+ self._wheel_cache = SimpleWheelCache(cache_dir)
218
+ self._ephem_cache = EphemWheelCache()
219
+
220
+ def get_path_for_link(self, link: Link) -> str:
221
+ return self._wheel_cache.get_path_for_link(link)
222
+
223
+ def get_ephem_path_for_link(self, link: Link) -> str:
224
+ return self._ephem_cache.get_path_for_link(link)
225
+
226
+ def get(
227
+ self,
228
+ link: Link,
229
+ package_name: Optional[str],
230
+ supported_tags: List[Tag],
231
+ ) -> Link:
232
+ cache_entry = self.get_cache_entry(link, package_name, supported_tags)
233
+ if cache_entry is None:
234
+ return link
235
+ return cache_entry.link
236
+
237
+ def get_cache_entry(
238
+ self,
239
+ link: Link,
240
+ package_name: Optional[str],
241
+ supported_tags: List[Tag],
242
+ ) -> Optional[CacheEntry]:
243
+ """Returns a CacheEntry with a link to a cached item if it exists or
244
+ None. The cache entry indicates if the item was found in the persistent
245
+ or ephemeral cache.
246
+ """
247
+ retval = self._wheel_cache.get(
248
+ link=link,
249
+ package_name=package_name,
250
+ supported_tags=supported_tags,
251
+ )
252
+ if retval is not link:
253
+ return CacheEntry(retval, persistent=True)
254
+
255
+ retval = self._ephem_cache.get(
256
+ link=link,
257
+ package_name=package_name,
258
+ supported_tags=supported_tags,
259
+ )
260
+ if retval is not link:
261
+ return CacheEntry(retval, persistent=False)
262
+
263
+ return None
264
+
265
+ @staticmethod
266
+ def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
267
+ origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
268
+ if origin_path.exists():
269
+ try:
270
+ origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8"))
271
+ except Exception as e:
272
+ logger.warning(
273
+ "Could not read origin file %s in cache entry (%s). "
274
+ "Will attempt to overwrite it.",
275
+ origin_path,
276
+ e,
277
+ )
278
+ else:
279
+ # TODO: use DirectUrl.equivalent when
280
+ # https://github.com/pypa/pip/pull/10564 is merged.
281
+ if origin.url != download_info.url:
282
+ logger.warning(
283
+ "Origin URL %s in cache entry %s does not match download URL "
284
+ "%s. This is likely a pip bug or a cache corruption issue. "
285
+ "Will overwrite it with the new value.",
286
+ origin.url,
287
+ cache_dir,
288
+ download_info.url,
289
+ )
290
+ origin_path.write_text(download_info.to_json(), encoding="utf-8")
.venv/lib/python3.11/site-packages/pip/_internal/configuration.py ADDED
@@ -0,0 +1,383 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Configuration management setup
2
+
3
+ Some terminology:
4
+ - name
5
+ As written in config files.
6
+ - value
7
+ Value associated with a name
8
+ - key
9
+ Name combined with it's section (section.name)
10
+ - variant
11
+ A single word describing where the configuration key-value pair came from
12
+ """
13
+
14
+ import configparser
15
+ import locale
16
+ import os
17
+ import sys
18
+ from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
19
+
20
+ from pip._internal.exceptions import (
21
+ ConfigurationError,
22
+ ConfigurationFileCouldNotBeLoaded,
23
+ )
24
+ from pip._internal.utils import appdirs
25
+ from pip._internal.utils.compat import WINDOWS
26
+ from pip._internal.utils.logging import getLogger
27
+ from pip._internal.utils.misc import ensure_dir, enum
28
+
29
+ RawConfigParser = configparser.RawConfigParser # Shorthand
30
+ Kind = NewType("Kind", str)
31
+
32
+ CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
33
+ ENV_NAMES_IGNORED = "version", "help"
34
+
35
+ # The kinds of configurations there are.
36
+ kinds = enum(
37
+ USER="user", # User Specific
38
+ GLOBAL="global", # System Wide
39
+ SITE="site", # [Virtual] Environment Specific
40
+ ENV="env", # from PIP_CONFIG_FILE
41
+ ENV_VAR="env-var", # from Environment Variables
42
+ )
43
+ OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
44
+ VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE
45
+
46
+ logger = getLogger(__name__)
47
+
48
+
49
+ # NOTE: Maybe use the optionx attribute to normalize keynames.
50
+ def _normalize_name(name: str) -> str:
51
+ """Make a name consistent regardless of source (environment or file)"""
52
+ name = name.lower().replace("_", "-")
53
+ if name.startswith("--"):
54
+ name = name[2:] # only prefer long opts
55
+ return name
56
+
57
+
58
+ def _disassemble_key(name: str) -> List[str]:
59
+ if "." not in name:
60
+ error_message = (
61
+ "Key does not contain dot separated section and key. "
62
+ f"Perhaps you wanted to use 'global.{name}' instead?"
63
+ )
64
+ raise ConfigurationError(error_message)
65
+ return name.split(".", 1)
66
+
67
+
68
def get_configuration_files() -> Dict[Kind, List[str]]:
    """Return configuration file paths for the GLOBAL, SITE and USER variants.

    The USER list places the legacy location (~/pip or ~/.pip) before the
    current user-config-dir location, so the newer file wins on update.
    """
    global_config_files = [
        os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip")
    ]

    # SITE is the [virtual] environment the interpreter runs in.
    site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
    legacy_config_file = os.path.join(
        os.path.expanduser("~"),
        "pip" if WINDOWS else ".pip",
        CONFIG_BASENAME,
    )
    new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
    return {
        kinds.GLOBAL: global_config_files,
        kinds.SITE: [site_config_file],
        kinds.USER: [legacy_config_file, new_config_file],
    }
85
+
86
+
87
class Configuration:
    """Handles management of configuration.

    Provides an interface to accessing and managing configuration files.

    This class provides an API that takes "section.key-name" style
    keys and stores the value associated with it as "key-name" under the
    section "section".

    This allows for a clean interface wherein the both the section and the
    key-name are preserved in an easy to manage form in the configuration files
    and the data stored is also nice.
    """

    def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
        """
        :param isolated: when True, environment variables and per-user config
            are ignored.
        :param load_only: restrict loading/editing to one file-backed variant
            (USER, GLOBAL or SITE); None loads everything.
        """
        super().__init__()

        # Only file-backed variants may be targeted for editing.
        if load_only is not None and load_only not in VALID_LOAD_ONLY:
            raise ConfigurationError(
                "Got invalid value for load_only - should be one of {}".format(
                    ", ".join(map(repr, VALID_LOAD_ONLY))
                )
            )
        self.isolated = isolated
        self.load_only = load_only

        # Because we keep track of where we got the data from
        self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
            variant: [] for variant in OVERRIDE_ORDER
        }
        self._config: Dict[Kind, Dict[str, Any]] = {
            variant: {} for variant in OVERRIDE_ORDER
        }
        self._modified_parsers: List[Tuple[str, RawConfigParser]] = []

    def load(self) -> None:
        """Loads configuration from configuration files and environment"""
        self._load_config_files()
        if not self.isolated:
            self._load_environment_vars()

    def get_file_to_edit(self) -> Optional[str]:
        """Returns the file with highest priority in configuration"""
        assert self.load_only is not None, "Need to be specified a file to be editing"

        try:
            return self._get_parser_to_modify()[0]
        except IndexError:
            return None

    def items(self) -> Iterable[Tuple[str, Any]]:
        """Returns key-value pairs like dict.items() representing the loaded
        configuration
        """
        return self._dictionary.items()

    def get_value(self, key: str) -> Any:
        """Get a value from the configuration."""
        orig_key = key
        key = _normalize_name(key)
        try:
            return self._dictionary[key]
        except KeyError:
            # disassembling triggers a more useful error message than simply
            # "No such key" in the case that the key isn't in the form command.option
            _disassemble_key(key)
            raise ConfigurationError(f"No such key - {orig_key}")

    def set_value(self, key: str, value: Any) -> None:
        """Modify a value in the configuration.

        Updates both the backing parser (for later save()) and the in-memory
        dictionary; requires load_only to be set.
        """
        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)

            # Modify the parser and the configuration
            if not parser.has_section(section):
                parser.add_section(section)
            parser.set(section, name, value)

        self._config[self.load_only][key] = value
        self._mark_as_modified(fname, parser)

    def unset_value(self, key: str) -> None:
        """Unset a value in the configuration."""
        orig_key = key
        key = _normalize_name(key)
        self._ensure_have_load_only()

        assert self.load_only
        if key not in self._config[self.load_only]:
            raise ConfigurationError(f"No such key - {orig_key}")

        fname, parser = self._get_parser_to_modify()

        if parser is not None:
            section, name = _disassemble_key(key)
            if not (
                parser.has_section(section) and parser.remove_option(section, name)
            ):
                # The option was not removed.
                raise ConfigurationError(
                    "Fatal Internal error [id=1]. Please report as a bug."
                )

            # The section may be empty after the option was removed.
            if not parser.items(section):
                parser.remove_section(section)
            self._mark_as_modified(fname, parser)

        del self._config[self.load_only][key]

    def save(self) -> None:
        """Save the current in-memory state."""
        self._ensure_have_load_only()

        for fname, parser in self._modified_parsers:
            logger.info("Writing to %s", fname)

            # Ensure directory exists.
            ensure_dir(os.path.dirname(fname))

            # Ensure directory's permission(need to be writeable)
            try:
                with open(fname, "w") as f:
                    parser.write(f)
            except OSError as error:
                raise ConfigurationError(
                    f"An error occurred while writing to the configuration file "
                    f"{fname}: {error}"
                )

    #
    # Private routines
    #

    def _ensure_have_load_only(self) -> None:
        # Guard for the editing APIs: mutating without a target file is an error.
        if self.load_only is None:
            raise ConfigurationError("Needed a specific file to be modifying.")
        logger.debug("Will be working with %s variant only", self.load_only)

    @property
    def _dictionary(self) -> Dict[str, Any]:
        """A dictionary representing the loaded configuration."""
        # NOTE: Dictionaries are not populated if not loaded. So, conditionals
        # are not needed here.
        retval = {}

        # Later variants in OVERRIDE_ORDER win over earlier ones.
        for variant in OVERRIDE_ORDER:
            retval.update(self._config[variant])

        return retval

    def _load_config_files(self) -> None:
        """Loads configuration from configuration files"""
        config_files = dict(self.iter_config_files())
        # PIP_CONFIG_FILE=os.devnull is the documented way to disable all
        # configuration files.
        if config_files[kinds.ENV][0:1] == [os.devnull]:
            logger.debug(
                "Skipping loading configuration files due to "
                "environment's PIP_CONFIG_FILE being os.devnull"
            )
            return

        for variant, files in config_files.items():
            for fname in files:
                # If there's specific variant set in `load_only`, load only
                # that variant, not the others.
                if self.load_only is not None and variant != self.load_only:
                    logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
                    continue

                parser = self._load_file(variant, fname)

                # Keeping track of the parsers used
                self._parsers[variant].append((fname, parser))

    def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
        # Parse one file and merge its sections into self._config[variant].
        logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
        parser = self._construct_parser(fname)

        for section in parser.sections():
            items = parser.items(section)
            self._config[variant].update(self._normalized_keys(section, items))

        return parser

    def _construct_parser(self, fname: str) -> RawConfigParser:
        parser = configparser.RawConfigParser()
        # If there is no such file, don't bother reading it but create the
        # parser anyway, to hold the data.
        # Doing this is useful when modifying and saving files, where we don't
        # need to construct a parser.
        if os.path.exists(fname):
            locale_encoding = locale.getpreferredencoding(False)
            try:
                parser.read(fname, encoding=locale_encoding)
            except UnicodeDecodeError:
                # See https://github.com/pypa/pip/issues/4963
                raise ConfigurationFileCouldNotBeLoaded(
                    reason=f"contains invalid {locale_encoding} characters",
                    fname=fname,
                )
            except configparser.Error as error:
                # See https://github.com/pypa/pip/issues/4893
                raise ConfigurationFileCouldNotBeLoaded(error=error)
        return parser

    def _load_environment_vars(self) -> None:
        """Loads configuration from environment variables"""
        self._config[kinds.ENV_VAR].update(
            self._normalized_keys(":env:", self.get_environ_vars())
        )

    def _normalized_keys(
        self, section: str, items: Iterable[Tuple[str, Any]]
    ) -> Dict[str, Any]:
        """Normalizes items to construct a dictionary with normalized keys.

        This routine is where the names become keys and are made the same
        regardless of source - configuration files or environment.
        """
        normalized = {}
        for name, val in items:
            key = section + "." + _normalize_name(name)
            normalized[key] = val
        return normalized

    def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if key.startswith("PIP_"):
                name = key[4:].lower()
                # PIP_VERSION / PIP_HELP are CLI flags, not config options.
                if name not in ENV_NAMES_IGNORED:
                    yield name, val

    # XXX: This is patched in the tests.
    def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
        """Yields variant and configuration files associated with it.

        This should be treated like items of a dictionary. The order
        here doesn't affect what gets overridden. That is controlled
        by OVERRIDE_ORDER. However this does control the order they are
        displayed to the user. It's probably most ergonomic to display
        things in the same order as OVERRIDE_ORDER
        """
        # SMELL: Move the conditions out of this function

        env_config_file = os.environ.get("PIP_CONFIG_FILE", None)
        config_files = get_configuration_files()

        yield kinds.GLOBAL, config_files[kinds.GLOBAL]

        # per-user config is not loaded when env_config_file exists
        should_load_user_config = not self.isolated and not (
            env_config_file and os.path.exists(env_config_file)
        )
        if should_load_user_config:
            # The legacy config file is overridden by the new config file
            yield kinds.USER, config_files[kinds.USER]

        # virtualenv config
        yield kinds.SITE, config_files[kinds.SITE]

        if env_config_file is not None:
            yield kinds.ENV, [env_config_file]
        else:
            yield kinds.ENV, []

    def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
        """Get values present in a config file"""
        return self._config[variant]

    def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
        # Determine which parser to modify
        assert self.load_only
        parsers = self._parsers[self.load_only]
        if not parsers:
            # This should not happen if everything works correctly.
            raise ConfigurationError(
                "Fatal Internal error [id=2]. Please report as a bug."
            )

        # Use the highest priority parser.
        return parsers[-1]

    # XXX: This is patched in the tests.
    def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
        # Deduplicated queue of parsers that save() must write back to disk.
        file_parser_tuple = (fname, parser)
        if file_parser_tuple not in self._modified_parsers:
            self._modified_parsers.append(file_parser_tuple)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self._dictionary!r})"
.venv/lib/python3.11/site-packages/pip/_internal/exceptions.py ADDED
@@ -0,0 +1,728 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Exceptions used throughout package.
2
+
3
+ This module MUST NOT try to import from anything within `pip._internal` to
4
+ operate. This is expected to be importable from any/all files within the
5
+ subpackage and, thus, should not depend on them.
6
+ """
7
+
8
+ import configparser
9
+ import contextlib
10
+ import locale
11
+ import logging
12
+ import pathlib
13
+ import re
14
+ import sys
15
+ from itertools import chain, groupby, repeat
16
+ from typing import TYPE_CHECKING, Dict, Iterator, List, Optional, Union
17
+
18
+ from pip._vendor.requests.models import Request, Response
19
+ from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
20
+ from pip._vendor.rich.markup import escape
21
+ from pip._vendor.rich.text import Text
22
+
23
+ if TYPE_CHECKING:
24
+ from hashlib import _Hash
25
+ from typing import Literal
26
+
27
+ from pip._internal.metadata import BaseDistribution
28
+ from pip._internal.req.req_install import InstallRequirement
29
+
30
+ logger = logging.getLogger(__name__)
31
+
32
+
33
+ #
34
+ # Scaffolding
35
+ #
36
+ def _is_kebab_case(s: str) -> bool:
37
+ return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None
38
+
39
+
40
def _prefix_with_indent(
    s: Union[Text, str],
    console: Console,
    *,
    prefix: str,
    indent: str,
) -> Text:
    """Render *s* with *prefix* before its first line and *indent* before
    every subsequent line.

    Accepts either a pre-rendered rich ``Text`` or a markup string, which is
    rendered via the console first.
    """
    if isinstance(s, Text):
        text = s
    else:
        text = console.render_str(s)

    # NOTE(review): overflow="ignore" presumably keeps rich from clipping or
    # wrapping the marker strings themselves — confirm against rich docs.
    return console.render_str(prefix, overflow="ignore") + console.render_str(
        f"\n{indent}", overflow="ignore"
    ).join(text.split(allow_blank=True))
55
+
56
+
57
class PipError(Exception):
    """The base pip error. All pip-defined exceptions derive from this."""
59
+
60
+
61
class DiagnosticPipError(PipError):
    """An error, that presents diagnostic information to the user.

    This contains a bunch of logic, to enable pretty presentation of our error
    messages. Each error gets a unique reference. Each error can also include
    additional context, a hint and/or a note -- which are presented with the
    main error message in a consistent style.

    This is adapted from the error output styling in `sphinx-theme-builder`.
    """

    # Subclasses may set this as a class attribute instead of passing
    # ``reference`` to __init__.
    reference: str

    def __init__(
        self,
        *,
        kind: 'Literal["error", "warning"]' = "error",
        reference: Optional[str] = None,
        message: Union[str, Text],
        context: Optional[Union[str, Text]],
        hint_stmt: Optional[Union[str, Text]],
        note_stmt: Optional[Union[str, Text]] = None,
        link: Optional[str] = None,
    ) -> None:
        # Ensure a proper reference is provided.
        if reference is None:
            assert hasattr(self, "reference"), "error reference not provided!"
            reference = self.reference
        assert _is_kebab_case(reference), "error reference must be kebab-case!"

        self.kind = kind
        self.reference = reference

        self.message = message
        self.context = context

        self.note_stmt = note_stmt
        self.hint_stmt = hint_stmt

        self.link = link

        super().__init__(f"<{self.__class__.__name__}: {self.reference}>")

    def __repr__(self) -> str:
        return (
            f"<{self.__class__.__name__}("
            f"reference={self.reference!r}, "
            f"message={self.message!r}, "
            f"context={self.context!r}, "
            f"note_stmt={self.note_stmt!r}, "
            f"hint_stmt={self.hint_stmt!r}"
            ")>"
        )

    def __rich_console__(
        self,
        console: Console,
        options: ConsoleOptions,
    ) -> RenderResult:
        # rich render hook: errors in red, warnings in yellow.
        colour = "red" if self.kind == "error" else "yellow"

        yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]"
        yield ""

        if not options.ascii_only:
            # Present the main message, with relevant context indented,
            # using unicode box-drawing markers.
            if self.context is not None:
                yield _prefix_with_indent(
                    self.message,
                    console,
                    prefix=f"[{colour}]×[/] ",
                    indent=f"[{colour}]│[/] ",
                )
                yield _prefix_with_indent(
                    self.context,
                    console,
                    prefix=f"[{colour}]╰─>[/] ",
                    indent=f"[{colour}]   [/] ",
                )
            else:
                # NOTE(review): this branch hardcodes red instead of using
                # ``colour`` — likely an upstream inconsistency for warnings.
                yield _prefix_with_indent(
                    self.message,
                    console,
                    prefix="[red]×[/] ",
                    indent="  ",
                )
        else:
            # ASCII fallback: plain message and context, no markers.
            yield self.message
            if self.context is not None:
                yield ""
                yield self.context

        if self.note_stmt is not None or self.hint_stmt is not None:
            yield ""

        if self.note_stmt is not None:
            yield _prefix_with_indent(
                self.note_stmt,
                console,
                prefix="[magenta bold]note[/]: ",
                indent="      ",
            )
        if self.hint_stmt is not None:
            yield _prefix_with_indent(
                self.hint_stmt,
                console,
                prefix="[cyan bold]hint[/]: ",
                indent="      ",
            )

        if self.link is not None:
            yield ""
            yield f"Link: {self.link}"
174
+
175
+
176
+ #
177
+ # Actual Errors
178
+ #
179
class ConfigurationError(PipError):
    """General exception in configuration"""
181
+
182
+
183
class InstallationError(PipError):
    """General exception during installation"""
185
+
186
+
187
class UninstallationError(PipError):
    """General exception during uninstallation"""
189
+
190
+
191
class MissingPyProjectBuildRequires(DiagnosticPipError):
    """Raised when pyproject.toml has `build-system`, but no `build-system.requires`."""

    reference = "missing-pyproject-build-system-requires"

    def __init__(self, *, package: str) -> None:
        # ``escape`` guards against rich markup injection from package names.
        super().__init__(
            message=f"Can not process {escape(package)}",
            context=Text(
                "This package has an invalid pyproject.toml file.\n"
                "The [build-system] table is missing the mandatory `requires` key."
            ),
            note_stmt="This is an issue with the package mentioned above, not pip.",
            hint_stmt=Text("See PEP 518 for the detailed specification."),
        )
206
+
207
+
208
class InvalidPyProjectBuildRequires(DiagnosticPipError):
    """Raised when pyproject.toml has an invalid `build-system.requires`."""

    reference = "invalid-pyproject-build-system-requires"

    def __init__(self, *, package: str, reason: str) -> None:
        # ``reason`` explains exactly why the requires value was rejected.
        super().__init__(
            message=f"Can not process {escape(package)}",
            context=Text(
                "This package has an invalid `build-system.requires` key in "
                f"pyproject.toml.\n{reason}"
            ),
            note_stmt="This is an issue with the package mentioned above, not pip.",
            hint_stmt=Text("See PEP 518 for the detailed specification."),
        )
223
+
224
+
225
class NoneMetadataError(PipError):
    """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".

    This signifies an inconsistency, when the Distribution claims to have
    the metadata file (if not, raise ``FileNotFoundError`` instead), but is
    not actually able to produce its content. This may be due to permission
    errors.
    """

    def __init__(
        self,
        dist: "BaseDistribution",
        metadata_name: str,
    ) -> None:
        """
        :param dist: A Distribution object.
        :param metadata_name: The name of the metadata being accessed
            (can be "METADATA" or "PKG-INFO").
        """
        self.dist = dist
        self.metadata_name = metadata_name

    def __str__(self) -> str:
        # Use `dist` in the error message because its stringification
        # includes more information, like the version and location.
        return f"None {self.metadata_name} metadata found for distribution: {self.dist}"
251
+
252
+
253
class UserInstallationInvalid(InstallationError):
    """A --user install is requested on an environment without user site."""

    def __str__(self) -> str:
        return "User base directory is not specified"
258
+
259
+
260
class InvalidSchemeCombination(InstallationError):
    """Raised when mutually exclusive installation scheme options are combined.

    The offending option names are passed as positional exception args.
    """

    def __str__(self) -> str:
        # Render "a, b and c" from the positional args.
        before = ", ".join(str(a) for a in self.args[:-1])
        return f"Cannot set {before} and {self.args[-1]} together"
264
+
265
+
266
class DistributionNotFound(InstallationError):
    """Raised when a distribution cannot be found to satisfy a requirement"""
268
+
269
+
270
class RequirementsFileParseError(InstallationError):
    """Raised when a general error occurs parsing a requirements file line."""
272
+
273
+
274
class BestVersionAlreadyInstalled(PipError):
    """Raised when the most up-to-date version of a package is already
    installed."""
277
+
278
+
279
class BadCommand(PipError):
    """Raised when virtualenv or a command is not found"""
281
+
282
+
283
class CommandError(PipError):
    """Raised when there is an error in command-line arguments"""
285
+
286
+
287
class PreviousBuildDirError(PipError):
    """Raised when there's a previous conflicting build directory"""
289
+
290
+
291
class NetworkConnectionError(PipError):
    """HTTP connection error"""

    def __init__(
        self,
        error_msg: str,
        response: Optional[Response] = None,
        request: Optional[Request] = None,
    ) -> None:
        """
        Initialize NetworkConnectionError with `request` and `response`
        objects.
        """
        self.response = response
        self.request = request
        self.error_msg = error_msg
        # When only a response is given, recover the request from it so both
        # attributes are populated whenever possible.
        if (
            self.response is not None
            and not self.request
            and hasattr(response, "request")
        ):
            self.request = self.response.request
        # Pass all three to the base class so they appear in exception args.
        super().__init__(error_msg, response, request)

    def __str__(self) -> str:
        return str(self.error_msg)
317
+
318
+
319
class InvalidWheelFilename(InstallationError):
    """Invalid wheel filename."""
321
+
322
+
323
class UnsupportedWheel(InstallationError):
    """Unsupported wheel."""
325
+
326
+
327
class InvalidWheel(InstallationError):
    """Invalid (e.g. corrupt) wheel."""

    def __init__(self, location: str, name: str) -> None:
        # ``location`` is where the wheel was found; ``name`` its filename.
        self.location = location
        self.name = name

    def __str__(self) -> str:
        return f"Wheel '{self.name}' located at {self.location} is invalid."
336
+
337
+
338
class MetadataInconsistent(InstallationError):
    """Built metadata contains inconsistent information.

    This is raised when the metadata contains values (e.g. name and version)
    that do not match the information previously obtained from sdist filename,
    user-supplied ``#egg=`` value, or an install requirement name.
    """

    def __init__(
        self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str
    ) -> None:
        """
        :param ireq: the requirement whose metadata mismatched.
        :param field: the metadata field name (e.g. "name", "version").
        :param f_val: the value expected from the requirement/filename.
        :param m_val: the value actually found in the built metadata.
        """
        self.ireq = ireq
        self.field = field
        self.f_val = f_val
        self.m_val = m_val

    def __str__(self) -> str:
        return (
            f"Requested {self.ireq} has inconsistent {self.field}: "
            f"expected {self.f_val!r}, but metadata has {self.m_val!r}"
        )
359
+
360
+
361
class InstallationSubprocessError(DiagnosticPipError, InstallationError):
    """A subprocess call failed."""

    reference = "subprocess-exited-with-error"

    def __init__(
        self,
        *,
        command_description: str,
        exit_code: int,
        output_lines: Optional[List[str]],
    ) -> None:
        """
        :param command_description: human-readable description of the command.
        :param exit_code: the subprocess's exit status.
        :param output_lines: captured output lines, or None when the output
            was streamed directly to the user (then we just point "above").
        """
        if output_lines is None:
            output_prompt = Text("See above for output.")
        else:
            # Wrap the captured output between a line-count header and an
            # end-of-output footer, both in red.
            output_prompt = (
                Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n")
                + Text("".join(output_lines))
                + Text.from_markup(R"[red]\[end of output][/]")
            )

        super().__init__(
            message=(
                f"[green]{escape(command_description)}[/] did not run successfully.\n"
                f"exit code: {exit_code}"
            ),
            context=output_prompt,
            hint_stmt=None,
            note_stmt=(
                "This error originates from a subprocess, and is likely not a "
                "problem with pip."
            ),
        )

        self.command_description = command_description
        self.exit_code = exit_code

    def __str__(self) -> str:
        return f"{self.command_description} exited with {self.exit_code}"
400
+
401
+
402
class MetadataGenerationFailed(InstallationSubprocessError, InstallationError):
    """Raised when a package's metadata could not be generated."""

    reference = "metadata-generation-failed"

    def __init__(
        self,
        *,
        package_details: str,
    ) -> None:
        # Deliberately skip InstallationSubprocessError.__init__ (which
        # requires command/exit-code details) and call the next class in the
        # MRO (DiagnosticPipError.__init__) directly.
        super(InstallationSubprocessError, self).__init__(
            message="Encountered error while generating package metadata.",
            context=escape(package_details),
            hint_stmt="See above for details.",
            note_stmt="This is an issue with the package mentioned above, not pip.",
        )

    def __str__(self) -> str:
        return "metadata generation failed"
419
+
420
+
421
class HashErrors(InstallationError):
    """Multiple HashError instances rolled into one for reporting"""

    def __init__(self) -> None:
        # Collected HashError instances, appended as they occur.
        self.errors: List["HashError"] = []

    def append(self, error: "HashError") -> None:
        self.errors.append(error)

    def __str__(self) -> str:
        lines = []
        # Sort by ``order`` so groupby yields one contiguous run per error
        # class (each HashError subclass defines a distinct order).
        self.errors.sort(key=lambda e: e.order)
        for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
            lines.append(cls.head)
            lines.extend(e.body() for e in errors_of_cls)
        if lines:
            return "\n".join(lines)
        return ""

    def __bool__(self) -> bool:
        # Truthy only when at least one error was collected.
        return bool(self.errors)
442
+
443
+
444
class HashError(InstallationError):
    """
    A failure to verify a package against known-good hashes

    :cvar order: An int sorting hash exception classes by difficulty of
        recovery (lower being harder), so the user doesn't bother fretting
        about unpinned packages when he has deeper issues, like VCS
        dependencies, to deal with. Also keeps error reports in a
        deterministic order.
    :cvar head: A section heading for display above potentially many
        exceptions of this kind
    :ivar req: The InstallRequirement that triggered this error. This is
        pasted on after the exception is instantiated, because it's not
        typically available earlier.

    """

    req: Optional["InstallRequirement"] = None
    head = ""
    order: int = -1

    def body(self) -> str:
        """Return a summary of me for display under the heading.

        This default implementation simply prints a description of the
        triggering requirement.

        :param req: The InstallRequirement that provoked this error, with
            its link already populated by the resolver's _populate_link().

        """
        return f"    {self._requirement_name()}"

    def __str__(self) -> str:
        return f"{self.head}\n{self.body()}"

    def _requirement_name(self) -> str:
        """Return a description of the requirement that triggered me.

        This default implementation returns long description of the req, with
        line numbers

        """
        # Fall back to a generic label when ``req`` was never attached.
        return str(self.req) if self.req else "unknown package"
488
+
489
+
490
class VcsHashUnsupported(HashError):
    """A hash was provided for a version-control-system-based requirement, but
    we don't have a method for hashing those."""

    order = 0
    head = (
        "Can't verify hashes for these requirements because we don't "
        "have a way to hash version control repositories:"
    )
499
+
500
+
501
class DirectoryUrlHashUnsupported(HashError):
    """A hash was provided for a ``file://`` requirement that points to a
    directory, but we don't have a method for hashing those."""

    order = 1
    head = (
        "Can't verify hashes for these file:// requirements because they "
        "point to directories:"
    )
510
+
511
+
512
class HashMissing(HashError):
    """A hash was needed for a requirement but is absent."""

    order = 2
    head = (
        "Hashes are required in --require-hashes mode, but they are "
        "missing from some requirements. Here is a list of those "
        "requirements along with the hashes their downloaded archives "
        "actually had. Add lines like these to your requirements files to "
        "prevent tampering. (If you did not enable --require-hashes "
        "manually, note that it turns on automatically when any package "
        "has a hash.)"
    )

    def __init__(self, gotten_hash: str) -> None:
        """
        :param gotten_hash: The hash of the (possibly malicious) archive we
            just downloaded
        """
        self.gotten_hash = gotten_hash

    def body(self) -> str:
        # Dodge circular import.
        from pip._internal.utils.hashes import FAVORITE_HASH

        package = None
        if self.req:
            # In the case of URL-based requirements, display the original URL
            # seen in the requirements file rather than the package name,
            # so the output can be directly copied into the requirements file.
            package = (
                self.req.original_link
                if self.req.is_direct
                # In case someone feeds something downright stupid
                # to InstallRequirement's constructor.
                else getattr(self.req, "req", None)
            )
        # Emit a ready-to-paste requirements-file line with the actual hash.
        return "    {} --hash={}:{}".format(
            package or "unknown package", FAVORITE_HASH, self.gotten_hash
        )
552
+
553
+
554
+ class HashUnpinned(HashError):
555
+ """A requirement had a hash specified but was not pinned to a specific
556
+ version."""
557
+
558
+ order = 3
559
+ head = (
560
+ "In --require-hashes mode, all requirements must have their "
561
+ "versions pinned with ==. These do not:"
562
+ )
563
+
564
+
565
+ class HashMismatch(HashError):
566
+ """
567
+ Distribution file hash values don't match.
568
+
569
+ :ivar package_name: The name of the package that triggered the hash
570
+ mismatch. Feel free to write to this after the exception is raise to
571
+ improve its error message.
572
+
573
+ """
574
+
575
+ order = 4
576
+ head = (
577
+ "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
578
+ "FILE. If you have updated the package versions, please update "
579
+ "the hashes. Otherwise, examine the package contents carefully; "
580
+ "someone may have tampered with them."
581
+ )
582
+
583
+ def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
584
+ """
585
+ :param allowed: A dict of algorithm names pointing to lists of allowed
586
+ hex digests
587
+ :param gots: A dict of algorithm names pointing to hashes we
588
+ actually got from the files under suspicion
589
+ """
590
+ self.allowed = allowed
591
+ self.gots = gots
592
+
593
+ def body(self) -> str:
594
+ return f" {self._requirement_name()}:\n{self._hash_comparison()}"
595
+
596
+ def _hash_comparison(self) -> str:
597
+ """
598
+ Return a comparison of actual and expected hash values.
599
+
600
+ Example::
601
+
602
+ Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
603
+ or 123451234512345123451234512345123451234512345
604
+ Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
605
+
606
+ """
607
+
608
+ def hash_then_or(hash_name: str) -> "chain[str]":
609
+ # For now, all the decent hashes have 6-char names, so we can get
610
+ # away with hard-coding space literals.
611
+ return chain([hash_name], repeat(" or"))
612
+
613
+ lines: List[str] = []
614
+ for hash_name, expecteds in self.allowed.items():
615
+ prefix = hash_then_or(hash_name)
616
+ lines.extend((f" Expected {next(prefix)} {e}") for e in expecteds)
617
+ lines.append(
618
+ f" Got {self.gots[hash_name].hexdigest()}\n"
619
+ )
620
+ return "\n".join(lines)
621
+
622
+
623
+ class UnsupportedPythonVersion(InstallationError):
624
+ """Unsupported python version according to Requires-Python package
625
+ metadata."""
626
+
627
+
628
+ class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
629
+ """When there are errors while loading a configuration file"""
630
+
631
+ def __init__(
632
+ self,
633
+ reason: str = "could not be loaded",
634
+ fname: Optional[str] = None,
635
+ error: Optional[configparser.Error] = None,
636
+ ) -> None:
637
+ super().__init__(error)
638
+ self.reason = reason
639
+ self.fname = fname
640
+ self.error = error
641
+
642
+ def __str__(self) -> str:
643
+ if self.fname is not None:
644
+ message_part = f" in {self.fname}."
645
+ else:
646
+ assert self.error is not None
647
+ message_part = f".\n{self.error}\n"
648
+ return f"Configuration file {self.reason}{message_part}"
649
+
650
+
651
+ _DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\
652
+ The Python environment under {sys.prefix} is managed externally, and may not be
653
+ manipulated by the user. Please use specific tooling from the distributor of
654
+ the Python installation to interact with this environment instead.
655
+ """
656
+
657
+
658
+ class ExternallyManagedEnvironment(DiagnosticPipError):
659
+ """The current environment is externally managed.
660
+
661
+ This is raised when the current environment is externally managed, as
662
+ defined by `PEP 668`_. The ``EXTERNALLY-MANAGED`` configuration is checked
663
+ and displayed when the error is bubbled up to the user.
664
+
665
+ :param error: The error message read from ``EXTERNALLY-MANAGED``.
666
+ """
667
+
668
+ reference = "externally-managed-environment"
669
+
670
+ def __init__(self, error: Optional[str]) -> None:
671
+ if error is None:
672
+ context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR)
673
+ else:
674
+ context = Text(error)
675
+ super().__init__(
676
+ message="This environment is externally managed",
677
+ context=context,
678
+ note_stmt=(
679
+ "If you believe this is a mistake, please contact your "
680
+ "Python installation or OS distribution provider. "
681
+ "You can override this, at the risk of breaking your Python "
682
+ "installation or OS, by passing --break-system-packages."
683
+ ),
684
+ hint_stmt=Text("See PEP 668 for the detailed specification."),
685
+ )
686
+
687
+ @staticmethod
688
+ def _iter_externally_managed_error_keys() -> Iterator[str]:
689
+ # LC_MESSAGES is in POSIX, but not the C standard. The most common
690
+ # platform that does not implement this category is Windows, where
691
+ # using other categories for console message localization is equally
692
+ # unreliable, so we fall back to the locale-less vendor message. This
693
+ # can always be re-evaluated when a vendor proposes a new alternative.
694
+ try:
695
+ category = locale.LC_MESSAGES
696
+ except AttributeError:
697
+ lang: Optional[str] = None
698
+ else:
699
+ lang, _ = locale.getlocale(category)
700
+ if lang is not None:
701
+ yield f"Error-{lang}"
702
+ for sep in ("-", "_"):
703
+ before, found, _ = lang.partition(sep)
704
+ if not found:
705
+ continue
706
+ yield f"Error-{before}"
707
+ yield "Error"
708
+
709
+ @classmethod
710
+ def from_config(
711
+ cls,
712
+ config: Union[pathlib.Path, str],
713
+ ) -> "ExternallyManagedEnvironment":
714
+ parser = configparser.ConfigParser(interpolation=None)
715
+ try:
716
+ parser.read(config, encoding="utf-8")
717
+ section = parser["externally-managed"]
718
+ for key in cls._iter_externally_managed_error_keys():
719
+ with contextlib.suppress(KeyError):
720
+ return cls(section[key])
721
+ except KeyError:
722
+ pass
723
+ except (OSError, UnicodeDecodeError, configparser.ParsingError):
724
+ from pip._internal.utils._log import VERBOSE
725
+
726
+ exc_info = logger.isEnabledFor(VERBOSE)
727
+ logger.warning("Failed to read %s", config, exc_info=exc_info)
728
+ return cls(None)
.venv/lib/python3.11/site-packages/pip/_internal/index/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ """Index interaction code
2
+ """
.venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (237 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc ADDED
Binary file (24.6 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc ADDED
Binary file (44.1 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc ADDED
Binary file (14 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/index/collector.py ADDED
@@ -0,0 +1,507 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ The main purpose of this module is to expose LinkCollector.collect_sources().
3
+ """
4
+
5
+ import collections
6
+ import email.message
7
+ import functools
8
+ import itertools
9
+ import json
10
+ import logging
11
+ import os
12
+ import urllib.parse
13
+ import urllib.request
14
+ from html.parser import HTMLParser
15
+ from optparse import Values
16
+ from typing import (
17
+ TYPE_CHECKING,
18
+ Callable,
19
+ Dict,
20
+ Iterable,
21
+ List,
22
+ MutableMapping,
23
+ NamedTuple,
24
+ Optional,
25
+ Sequence,
26
+ Tuple,
27
+ Union,
28
+ )
29
+
30
+ from pip._vendor import requests
31
+ from pip._vendor.requests import Response
32
+ from pip._vendor.requests.exceptions import RetryError, SSLError
33
+
34
+ from pip._internal.exceptions import NetworkConnectionError
35
+ from pip._internal.models.link import Link
36
+ from pip._internal.models.search_scope import SearchScope
37
+ from pip._internal.network.session import PipSession
38
+ from pip._internal.network.utils import raise_for_status
39
+ from pip._internal.utils.filetypes import is_archive_file
40
+ from pip._internal.utils.misc import redact_auth_from_url
41
+ from pip._internal.vcs import vcs
42
+
43
+ from .sources import CandidatesFromPage, LinkSource, build_source
44
+
45
+ if TYPE_CHECKING:
46
+ from typing import Protocol
47
+ else:
48
+ Protocol = object
49
+
50
+ logger = logging.getLogger(__name__)
51
+
52
+ ResponseHeaders = MutableMapping[str, str]
53
+
54
+
55
+ def _match_vcs_scheme(url: str) -> Optional[str]:
56
+ """Look for VCS schemes in the URL.
57
+
58
+ Returns the matched VCS scheme, or None if there's no match.
59
+ """
60
+ for scheme in vcs.schemes:
61
+ if url.lower().startswith(scheme) and url[len(scheme)] in "+:":
62
+ return scheme
63
+ return None
64
+
65
+
66
+ class _NotAPIContent(Exception):
67
+ def __init__(self, content_type: str, request_desc: str) -> None:
68
+ super().__init__(content_type, request_desc)
69
+ self.content_type = content_type
70
+ self.request_desc = request_desc
71
+
72
+
73
+ def _ensure_api_header(response: Response) -> None:
74
+ """
75
+ Check the Content-Type header to ensure the response contains a Simple
76
+ API Response.
77
+
78
+ Raises `_NotAPIContent` if the content type is not a valid content-type.
79
+ """
80
+ content_type = response.headers.get("Content-Type", "Unknown")
81
+
82
+ content_type_l = content_type.lower()
83
+ if content_type_l.startswith(
84
+ (
85
+ "text/html",
86
+ "application/vnd.pypi.simple.v1+html",
87
+ "application/vnd.pypi.simple.v1+json",
88
+ )
89
+ ):
90
+ return
91
+
92
+ raise _NotAPIContent(content_type, response.request.method)
93
+
94
+
95
+ class _NotHTTP(Exception):
96
+ pass
97
+
98
+
99
+ def _ensure_api_response(url: str, session: PipSession) -> None:
100
+ """
101
+ Send a HEAD request to the URL, and ensure the response contains a simple
102
+ API Response.
103
+
104
+ Raises `_NotHTTP` if the URL is not available for a HEAD request, or
105
+ `_NotAPIContent` if the content type is not a valid content type.
106
+ """
107
+ scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
108
+ if scheme not in {"http", "https"}:
109
+ raise _NotHTTP()
110
+
111
+ resp = session.head(url, allow_redirects=True)
112
+ raise_for_status(resp)
113
+
114
+ _ensure_api_header(resp)
115
+
116
+
117
+ def _get_simple_response(url: str, session: PipSession) -> Response:
118
+ """Access an Simple API response with GET, and return the response.
119
+
120
+ This consists of three parts:
121
+
122
+ 1. If the URL looks suspiciously like an archive, send a HEAD first to
123
+ check the Content-Type is HTML or Simple API, to avoid downloading a
124
+ large file. Raise `_NotHTTP` if the content type cannot be determined, or
125
+ `_NotAPIContent` if it is not HTML or a Simple API.
126
+ 2. Actually perform the request. Raise HTTP exceptions on network failures.
127
+ 3. Check the Content-Type header to make sure we got a Simple API response,
128
+ and raise `_NotAPIContent` otherwise.
129
+ """
130
+ if is_archive_file(Link(url).filename):
131
+ _ensure_api_response(url, session=session)
132
+
133
+ logger.debug("Getting page %s", redact_auth_from_url(url))
134
+
135
+ resp = session.get(
136
+ url,
137
+ headers={
138
+ "Accept": ", ".join(
139
+ [
140
+ "application/vnd.pypi.simple.v1+json",
141
+ "application/vnd.pypi.simple.v1+html; q=0.1",
142
+ "text/html; q=0.01",
143
+ ]
144
+ ),
145
+ # We don't want to blindly returned cached data for
146
+ # /simple/, because authors generally expecting that
147
+ # twine upload && pip install will function, but if
148
+ # they've done a pip install in the last ~10 minutes
149
+ # it won't. Thus by setting this to zero we will not
150
+ # blindly use any cached data, however the benefit of
151
+ # using max-age=0 instead of no-cache, is that we will
152
+ # still support conditional requests, so we will still
153
+ # minimize traffic sent in cases where the page hasn't
154
+ # changed at all, we will just always incur the round
155
+ # trip for the conditional GET now instead of only
156
+ # once per 10 minutes.
157
+ # For more information, please see pypa/pip#5670.
158
+ "Cache-Control": "max-age=0",
159
+ },
160
+ )
161
+ raise_for_status(resp)
162
+
163
+ # The check for archives above only works if the url ends with
164
+ # something that looks like an archive. However that is not a
165
+ # requirement of an url. Unless we issue a HEAD request on every
166
+ # url we cannot know ahead of time for sure if something is a
167
+ # Simple API response or not. However we can check after we've
168
+ # downloaded it.
169
+ _ensure_api_header(resp)
170
+
171
+ logger.debug(
172
+ "Fetched page %s as %s",
173
+ redact_auth_from_url(url),
174
+ resp.headers.get("Content-Type", "Unknown"),
175
+ )
176
+
177
+ return resp
178
+
179
+
180
+ def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
181
+ """Determine if we have any encoding information in our headers."""
182
+ if headers and "Content-Type" in headers:
183
+ m = email.message.Message()
184
+ m["content-type"] = headers["Content-Type"]
185
+ charset = m.get_param("charset")
186
+ if charset:
187
+ return str(charset)
188
+ return None
189
+
190
+
191
+ class CacheablePageContent:
192
+ def __init__(self, page: "IndexContent") -> None:
193
+ assert page.cache_link_parsing
194
+ self.page = page
195
+
196
+ def __eq__(self, other: object) -> bool:
197
+ return isinstance(other, type(self)) and self.page.url == other.page.url
198
+
199
+ def __hash__(self) -> int:
200
+ return hash(self.page.url)
201
+
202
+
203
+ class ParseLinks(Protocol):
204
+ def __call__(self, page: "IndexContent") -> Iterable[Link]:
205
+ ...
206
+
207
+
208
+ def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
209
+ """
210
+ Given a function that parses an Iterable[Link] from an IndexContent, cache the
211
+ function's result (keyed by CacheablePageContent), unless the IndexContent
212
+ `page` has `page.cache_link_parsing == False`.
213
+ """
214
+
215
+ @functools.lru_cache(maxsize=None)
216
+ def wrapper(cacheable_page: CacheablePageContent) -> List[Link]:
217
+ return list(fn(cacheable_page.page))
218
+
219
+ @functools.wraps(fn)
220
+ def wrapper_wrapper(page: "IndexContent") -> List[Link]:
221
+ if page.cache_link_parsing:
222
+ return wrapper(CacheablePageContent(page))
223
+ return list(fn(page))
224
+
225
+ return wrapper_wrapper
226
+
227
+
228
+ @with_cached_index_content
229
+ def parse_links(page: "IndexContent") -> Iterable[Link]:
230
+ """
231
+ Parse a Simple API's Index Content, and yield its anchor elements as Link objects.
232
+ """
233
+
234
+ content_type_l = page.content_type.lower()
235
+ if content_type_l.startswith("application/vnd.pypi.simple.v1+json"):
236
+ data = json.loads(page.content)
237
+ for file in data.get("files", []):
238
+ link = Link.from_json(file, page.url)
239
+ if link is None:
240
+ continue
241
+ yield link
242
+ return
243
+
244
+ parser = HTMLLinkParser(page.url)
245
+ encoding = page.encoding or "utf-8"
246
+ parser.feed(page.content.decode(encoding))
247
+
248
+ url = page.url
249
+ base_url = parser.base_url or url
250
+ for anchor in parser.anchors:
251
+ link = Link.from_element(anchor, page_url=url, base_url=base_url)
252
+ if link is None:
253
+ continue
254
+ yield link
255
+
256
+
257
+ class IndexContent:
258
+ """Represents one response (or page), along with its URL"""
259
+
260
+ def __init__(
261
+ self,
262
+ content: bytes,
263
+ content_type: str,
264
+ encoding: Optional[str],
265
+ url: str,
266
+ cache_link_parsing: bool = True,
267
+ ) -> None:
268
+ """
269
+ :param encoding: the encoding to decode the given content.
270
+ :param url: the URL from which the HTML was downloaded.
271
+ :param cache_link_parsing: whether links parsed from this page's url
272
+ should be cached. PyPI index urls should
273
+ have this set to False, for example.
274
+ """
275
+ self.content = content
276
+ self.content_type = content_type
277
+ self.encoding = encoding
278
+ self.url = url
279
+ self.cache_link_parsing = cache_link_parsing
280
+
281
+ def __str__(self) -> str:
282
+ return redact_auth_from_url(self.url)
283
+
284
+
285
+ class HTMLLinkParser(HTMLParser):
286
+ """
287
+ HTMLParser that keeps the first base HREF and a list of all anchor
288
+ elements' attributes.
289
+ """
290
+
291
+ def __init__(self, url: str) -> None:
292
+ super().__init__(convert_charrefs=True)
293
+
294
+ self.url: str = url
295
+ self.base_url: Optional[str] = None
296
+ self.anchors: List[Dict[str, Optional[str]]] = []
297
+
298
+ def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
299
+ if tag == "base" and self.base_url is None:
300
+ href = self.get_href(attrs)
301
+ if href is not None:
302
+ self.base_url = href
303
+ elif tag == "a":
304
+ self.anchors.append(dict(attrs))
305
+
306
+ def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
307
+ for name, value in attrs:
308
+ if name == "href":
309
+ return value
310
+ return None
311
+
312
+
313
+ def _handle_get_simple_fail(
314
+ link: Link,
315
+ reason: Union[str, Exception],
316
+ meth: Optional[Callable[..., None]] = None,
317
+ ) -> None:
318
+ if meth is None:
319
+ meth = logger.debug
320
+ meth("Could not fetch URL %s: %s - skipping", link, reason)
321
+
322
+
323
+ def _make_index_content(
324
+ response: Response, cache_link_parsing: bool = True
325
+ ) -> IndexContent:
326
+ encoding = _get_encoding_from_headers(response.headers)
327
+ return IndexContent(
328
+ response.content,
329
+ response.headers["Content-Type"],
330
+ encoding=encoding,
331
+ url=response.url,
332
+ cache_link_parsing=cache_link_parsing,
333
+ )
334
+
335
+
336
+ def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]:
337
+ url = link.url.split("#", 1)[0]
338
+
339
+ # Check for VCS schemes that do not support lookup as web pages.
340
+ vcs_scheme = _match_vcs_scheme(url)
341
+ if vcs_scheme:
342
+ logger.warning(
343
+ "Cannot look at %s URL %s because it does not support lookup as web pages.",
344
+ vcs_scheme,
345
+ link,
346
+ )
347
+ return None
348
+
349
+ # Tack index.html onto file:// URLs that point to directories
350
+ scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
351
+ if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
352
+ # add trailing slash if not present so urljoin doesn't trim
353
+ # final segment
354
+ if not url.endswith("/"):
355
+ url += "/"
356
+ # TODO: In the future, it would be nice if pip supported PEP 691
357
+ # style responses in the file:// URLs, however there's no
358
+ # standard file extension for application/vnd.pypi.simple.v1+json
359
+ # so we'll need to come up with something on our own.
360
+ url = urllib.parse.urljoin(url, "index.html")
361
+ logger.debug(" file: URL is directory, getting %s", url)
362
+
363
+ try:
364
+ resp = _get_simple_response(url, session=session)
365
+ except _NotHTTP:
366
+ logger.warning(
367
+ "Skipping page %s because it looks like an archive, and cannot "
368
+ "be checked by a HTTP HEAD request.",
369
+ link,
370
+ )
371
+ except _NotAPIContent as exc:
372
+ logger.warning(
373
+ "Skipping page %s because the %s request got Content-Type: %s. "
374
+ "The only supported Content-Types are application/vnd.pypi.simple.v1+json, "
375
+ "application/vnd.pypi.simple.v1+html, and text/html",
376
+ link,
377
+ exc.request_desc,
378
+ exc.content_type,
379
+ )
380
+ except NetworkConnectionError as exc:
381
+ _handle_get_simple_fail(link, exc)
382
+ except RetryError as exc:
383
+ _handle_get_simple_fail(link, exc)
384
+ except SSLError as exc:
385
+ reason = "There was a problem confirming the ssl certificate: "
386
+ reason += str(exc)
387
+ _handle_get_simple_fail(link, reason, meth=logger.info)
388
+ except requests.ConnectionError as exc:
389
+ _handle_get_simple_fail(link, f"connection error: {exc}")
390
+ except requests.Timeout:
391
+ _handle_get_simple_fail(link, "timed out")
392
+ else:
393
+ return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing)
394
+ return None
395
+
396
+
397
+ class CollectedSources(NamedTuple):
398
+ find_links: Sequence[Optional[LinkSource]]
399
+ index_urls: Sequence[Optional[LinkSource]]
400
+
401
+
402
+ class LinkCollector:
403
+
404
+ """
405
+ Responsible for collecting Link objects from all configured locations,
406
+ making network requests as needed.
407
+
408
+ The class's main method is its collect_sources() method.
409
+ """
410
+
411
+ def __init__(
412
+ self,
413
+ session: PipSession,
414
+ search_scope: SearchScope,
415
+ ) -> None:
416
+ self.search_scope = search_scope
417
+ self.session = session
418
+
419
+ @classmethod
420
+ def create(
421
+ cls,
422
+ session: PipSession,
423
+ options: Values,
424
+ suppress_no_index: bool = False,
425
+ ) -> "LinkCollector":
426
+ """
427
+ :param session: The Session to use to make requests.
428
+ :param suppress_no_index: Whether to ignore the --no-index option
429
+ when constructing the SearchScope object.
430
+ """
431
+ index_urls = [options.index_url] + options.extra_index_urls
432
+ if options.no_index and not suppress_no_index:
433
+ logger.debug(
434
+ "Ignoring indexes: %s",
435
+ ",".join(redact_auth_from_url(url) for url in index_urls),
436
+ )
437
+ index_urls = []
438
+
439
+ # Make sure find_links is a list before passing to create().
440
+ find_links = options.find_links or []
441
+
442
+ search_scope = SearchScope.create(
443
+ find_links=find_links,
444
+ index_urls=index_urls,
445
+ no_index=options.no_index,
446
+ )
447
+ link_collector = LinkCollector(
448
+ session=session,
449
+ search_scope=search_scope,
450
+ )
451
+ return link_collector
452
+
453
+ @property
454
+ def find_links(self) -> List[str]:
455
+ return self.search_scope.find_links
456
+
457
+ def fetch_response(self, location: Link) -> Optional[IndexContent]:
458
+ """
459
+ Fetch an HTML page containing package links.
460
+ """
461
+ return _get_index_content(location, session=self.session)
462
+
463
+ def collect_sources(
464
+ self,
465
+ project_name: str,
466
+ candidates_from_page: CandidatesFromPage,
467
+ ) -> CollectedSources:
468
+ # The OrderedDict calls deduplicate sources by URL.
469
+ index_url_sources = collections.OrderedDict(
470
+ build_source(
471
+ loc,
472
+ candidates_from_page=candidates_from_page,
473
+ page_validator=self.session.is_secure_origin,
474
+ expand_dir=False,
475
+ cache_link_parsing=False,
476
+ project_name=project_name,
477
+ )
478
+ for loc in self.search_scope.get_index_urls_locations(project_name)
479
+ ).values()
480
+ find_links_sources = collections.OrderedDict(
481
+ build_source(
482
+ loc,
483
+ candidates_from_page=candidates_from_page,
484
+ page_validator=self.session.is_secure_origin,
485
+ expand_dir=True,
486
+ cache_link_parsing=True,
487
+ project_name=project_name,
488
+ )
489
+ for loc in self.find_links
490
+ ).values()
491
+
492
+ if logger.isEnabledFor(logging.DEBUG):
493
+ lines = [
494
+ f"* {s.link}"
495
+ for s in itertools.chain(find_links_sources, index_url_sources)
496
+ if s is not None and s.link is not None
497
+ ]
498
+ lines = [
499
+ f"{len(lines)} location(s) to search "
500
+ f"for versions of {project_name}:"
501
+ ] + lines
502
+ logger.debug("\n".join(lines))
503
+
504
+ return CollectedSources(
505
+ find_links=list(find_links_sources),
506
+ index_urls=list(index_url_sources),
507
+ )
.venv/lib/python3.11/site-packages/pip/_internal/index/package_finder.py ADDED
@@ -0,0 +1,1027 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Routines related to PyPI, indexes"""
2
+
3
+ import enum
4
+ import functools
5
+ import itertools
6
+ import logging
7
+ import re
8
+ from typing import TYPE_CHECKING, FrozenSet, Iterable, List, Optional, Set, Tuple, Union
9
+
10
+ from pip._vendor.packaging import specifiers
11
+ from pip._vendor.packaging.tags import Tag
12
+ from pip._vendor.packaging.utils import canonicalize_name
13
+ from pip._vendor.packaging.version import _BaseVersion
14
+ from pip._vendor.packaging.version import parse as parse_version
15
+
16
+ from pip._internal.exceptions import (
17
+ BestVersionAlreadyInstalled,
18
+ DistributionNotFound,
19
+ InvalidWheelFilename,
20
+ UnsupportedWheel,
21
+ )
22
+ from pip._internal.index.collector import LinkCollector, parse_links
23
+ from pip._internal.models.candidate import InstallationCandidate
24
+ from pip._internal.models.format_control import FormatControl
25
+ from pip._internal.models.link import Link
26
+ from pip._internal.models.search_scope import SearchScope
27
+ from pip._internal.models.selection_prefs import SelectionPreferences
28
+ from pip._internal.models.target_python import TargetPython
29
+ from pip._internal.models.wheel import Wheel
30
+ from pip._internal.req import InstallRequirement
31
+ from pip._internal.utils._log import getLogger
32
+ from pip._internal.utils.filetypes import WHEEL_EXTENSION
33
+ from pip._internal.utils.hashes import Hashes
34
+ from pip._internal.utils.logging import indent_log
35
+ from pip._internal.utils.misc import build_netloc
36
+ from pip._internal.utils.packaging import check_requires_python
37
+ from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
38
+
39
+ if TYPE_CHECKING:
40
+ from pip._vendor.typing_extensions import TypeGuard
41
+
42
+ __all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]
43
+
44
+
45
+ logger = getLogger(__name__)
46
+
47
+ BuildTag = Union[Tuple[()], Tuple[int, str]]
48
+ CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]
49
+
50
+
51
+ def _check_link_requires_python(
52
+ link: Link,
53
+ version_info: Tuple[int, int, int],
54
+ ignore_requires_python: bool = False,
55
+ ) -> bool:
56
+ """
57
+ Return whether the given Python version is compatible with a link's
58
+ "Requires-Python" value.
59
+
60
+ :param version_info: A 3-tuple of ints representing the Python
61
+ major-minor-micro version to check.
62
+ :param ignore_requires_python: Whether to ignore the "Requires-Python"
63
+ value if the given Python version isn't compatible.
64
+ """
65
+ try:
66
+ is_compatible = check_requires_python(
67
+ link.requires_python,
68
+ version_info=version_info,
69
+ )
70
+ except specifiers.InvalidSpecifier:
71
+ logger.debug(
72
+ "Ignoring invalid Requires-Python (%r) for link: %s",
73
+ link.requires_python,
74
+ link,
75
+ )
76
+ else:
77
+ if not is_compatible:
78
+ version = ".".join(map(str, version_info))
79
+ if not ignore_requires_python:
80
+ logger.verbose(
81
+ "Link requires a different Python (%s not in: %r): %s",
82
+ version,
83
+ link.requires_python,
84
+ link,
85
+ )
86
+ return False
87
+
88
+ logger.debug(
89
+ "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",
90
+ version,
91
+ link.requires_python,
92
+ link,
93
+ )
94
+
95
+ return True
96
+
97
+
98
+ class LinkType(enum.Enum):
99
+ candidate = enum.auto()
100
+ different_project = enum.auto()
101
+ yanked = enum.auto()
102
+ format_unsupported = enum.auto()
103
+ format_invalid = enum.auto()
104
+ platform_mismatch = enum.auto()
105
+ requires_python_mismatch = enum.auto()
106
+
107
+
108
class LinkEvaluator:

    """
    Responsible for evaluating links for a particular project.
    """

    # Matches a trailing "-pyX" or "-pyX.Y" marker in a version fragment,
    # e.g. "1.0-py3" or "1.0-py2.7"; group 1 captures the Python version.
    _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    def __init__(
        self,
        project_name: str,
        canonical_name: str,
        formats: FrozenSet[str],
        target_python: TargetPython,
        allow_yanked: bool,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """
        :param project_name: The user supplied package name.
        :param canonical_name: The canonical package name.
        :param formats: The formats allowed for this package. Should be a set
            with 'binary' or 'source' or both in it.
        :param target_python: The target Python interpreter to use when
            evaluating link compatibility. This is used, for example, to
            check wheel compatibility, as well as when checking the Python
            version, e.g. the Python version embedded in a link filename
            (or egg fragment) and against an HTML link's optional PEP 503
            "data-requires-python" attribute.
        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param ignore_requires_python: Whether to ignore incompatible
            PEP 503 "data-requires-python" values in HTML links. Defaults
            to False.
        """
        if ignore_requires_python is None:
            ignore_requires_python = False

        self._allow_yanked = allow_yanked
        self._canonical_name = canonical_name
        self._ignore_requires_python = ignore_requires_python
        self._formats = formats
        self._target_python = target_python

        # The user-supplied (non-canonicalized) name, used in messages.
        self.project_name = project_name

    def evaluate_link(self, link: Link) -> Tuple[LinkType, str]:
        """
        Determine whether a link is a candidate for installation.

        :return: A tuple (result, detail), where *result* is an enum
            representing whether the evaluation found a candidate, or the reason
            why one is not found. If a candidate is found, *detail* will be the
            candidate's version string; if one is not found, it contains the
            reason the link fails to qualify.
        """
        version = None
        # Reject yanked files (PEP 592) up front unless explicitly allowed.
        if link.is_yanked and not self._allow_yanked:
            reason = link.yanked_reason or "<none given>"
            return (LinkType.yanked, f"yanked for reason: {reason}")

        # An egg fragment, when present, supplies the name/version info
        # directly; otherwise it is parsed out of the filename.
        if link.egg_fragment:
            egg_info = link.egg_fragment
            ext = link.ext
        else:
            egg_info, ext = link.splitext()
            if not ext:
                return (LinkType.format_unsupported, "not a file")
            if ext not in SUPPORTED_EXTENSIONS:
                return (
                    LinkType.format_unsupported,
                    f"unsupported archive format: {ext}",
                )
            if "binary" not in self._formats and ext == WHEEL_EXTENSION:
                reason = f"No binaries permitted for {self.project_name}"
                return (LinkType.format_unsupported, reason)
            if "macosx10" in link.path and ext == ".zip":
                return (LinkType.format_unsupported, "macosx10 one")
            if ext == WHEEL_EXTENSION:
                try:
                    wheel = Wheel(link.filename)
                except InvalidWheelFilename:
                    return (
                        LinkType.format_invalid,
                        "invalid wheel filename",
                    )
                if canonicalize_name(wheel.name) != self._canonical_name:
                    reason = f"wrong project name (not {self.project_name})"
                    return (LinkType.different_project, reason)

                supported_tags = self._target_python.get_unsorted_tags()
                if not wheel.supported(supported_tags):
                    # Include the wheel's tags in the reason string to
                    # simplify troubleshooting compatibility issues.
                    file_tags = ", ".join(wheel.get_formatted_file_tags())
                    reason = (
                        f"none of the wheel's tags ({file_tags}) are compatible "
                        f"(run pip debug --verbose to show compatible tags)"
                    )
                    return (LinkType.platform_mismatch, reason)

                # For wheels the version comes from the filename itself.
                version = wheel.version

        # This should be up by the self.ok_binary check, but see issue 2700.
        if "source" not in self._formats and ext != WHEEL_EXTENSION:
            reason = f"No sources permitted for {self.project_name}"
            return (LinkType.format_unsupported, reason)

        # Non-wheel links: parse the version out of the name fragment.
        if not version:
            version = _extract_version_from_fragment(
                egg_info,
                self._canonical_name,
            )
        if not version:
            reason = f"Missing project version for {self.project_name}"
            return (LinkType.format_invalid, reason)

        # Strip a trailing "-pyX.Y" marker and verify it matches the
        # target interpreter's version.
        match = self._py_version_re.search(version)
        if match:
            version = version[: match.start()]
            py_version = match.group(1)
            if py_version != self._target_python.py_version:
                return (
                    LinkType.platform_mismatch,
                    "Python version is incorrect",
                )

        # Finally, honor the link's data-requires-python attribute
        # (unless configured to ignore it).
        supports_python = _check_link_requires_python(
            link,
            version_info=self._target_python.py_version_info,
            ignore_requires_python=self._ignore_requires_python,
        )
        if not supports_python:
            reason = f"{version} Requires-Python {link.requires_python}"
            return (LinkType.requires_python_mismatch, reason)

        logger.debug("Found link %s, version: %s", link, version)

        return (LinkType.candidate, version)
250
+
251
+
252
def filter_unallowed_hashes(
    candidates: List[InstallationCandidate],
    hashes: Optional[Hashes],
    project_name: str,
) -> List[InstallationCandidate]:
    """Drop candidates whose hashes are disallowed and return a new list.

    If at least one candidate matches an allowed hash, the result contains
    every candidate that either matches or carries no hash at all (so a
    warning can later be logged for preferred-but-unhashed candidates).
    If nothing matches, all candidates are returned unchanged, which lets
    pip report the hash of the candidate that would otherwise have been
    installed (e.g. so the user can update their requirements file).

    :param candidates: The candidates to filter.
    :param hashes: The allowed hashes, or a falsy value to disable filtering.
    :param project_name: Project name, used only for log messages.
    """
    if not hashes:
        logger.debug(
            "Given no hashes to check %s links for project %r: "
            "discarding no candidates",
            len(candidates),
            project_name,
        )
        # Return a fresh list so callers never receive the input object back.
        return list(candidates)

    matches_or_no_digest: List[InstallationCandidate] = []
    non_matches: List[InstallationCandidate] = []  # kept only for logging
    match_count = 0
    for cand in candidates:
        cand_link = cand.link
        if cand_link.has_hash and not cand_link.is_hash_allowed(hashes=hashes):
            non_matches.append(cand)
            continue
        if cand_link.has_hash:
            match_count += 1
        # Reaching here means: either an allowed hash, or no hash at all.
        matches_or_no_digest.append(cand)

    # Only filter when something actually matched; otherwise keep everything
    # (copied, so the caller's list is never returned directly).
    filtered = matches_or_no_digest if match_count else list(candidates)

    if len(filtered) == len(candidates):
        discard_message = "discarding no candidates"
    else:
        discard_message = "discarding {} non-matches:\n  {}".format(
            len(non_matches),
            "\n  ".join(str(cand.link) for cand in non_matches),
        )

    logger.debug(
        "Checked %s links for project %r against %s hashes "
        "(%s matches, %s no digest): %s",
        len(candidates),
        project_name,
        hashes.digest_count,
        match_count,
        len(matches_or_no_digest) - match_count,
        discard_message,
    )

    return filtered
324
+
325
+
326
class CandidatePreferences:

    """
    Bundles the preference flags used when filtering and sorting
    InstallationCandidate objects.
    """

    def __init__(
        self,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
    ) -> None:
        """
        :param prefer_binary: Whether wheels should sort above sdists.
        :param allow_all_prereleases: Whether to allow all pre-releases.
        """
        self.prefer_binary = prefer_binary
        self.allow_all_prereleases = allow_all_prereleases
343
+
344
+
345
class BestCandidateResult:
    """A collection of candidates, returned by `PackageFinder.find_best_candidate`.

    This class is only intended to be instantiated by CandidateEvaluator's
    `compute_best_candidate()` method.
    """

    def __init__(
        self,
        candidates: List[InstallationCandidate],
        applicable_candidates: List[InstallationCandidate],
        best_candidate: Optional[InstallationCandidate],
    ) -> None:
        """
        :param candidates: A sequence of all available candidates found.
        :param applicable_candidates: The applicable candidates.
        :param best_candidate: The most preferred candidate found, or None
            if no applicable candidates were found.
        """
        # Internal-consistency checks: applicable ⊆ all, and the best
        # candidate (when present) must itself be applicable.
        assert set(applicable_candidates) <= set(candidates)
        if best_candidate is None:
            assert not applicable_candidates
        else:
            assert best_candidate in applicable_candidates

        self._candidates = candidates
        self._applicable_candidates = applicable_candidates
        self.best_candidate = best_candidate

    def iter_all(self) -> Iterable[InstallationCandidate]:
        """Iterate through all candidates."""
        return iter(self._candidates)

    def iter_applicable(self) -> Iterable[InstallationCandidate]:
        """Iterate through the applicable candidates."""
        return iter(self._applicable_candidates)
383
+
384
+
385
class CandidateEvaluator:

    """
    Responsible for filtering and sorting candidates for installation based
    on what tags are valid.
    """

    @classmethod
    def create(
        cls,
        project_name: str,
        target_python: Optional[TargetPython] = None,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> "CandidateEvaluator":
        """Create a CandidateEvaluator object.

        :param target_python: The target Python interpreter to use when
            checking compatibility. If None (the default), a TargetPython
            object will be constructed from the running Python.
        :param specifier: An optional object implementing `filter`
            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
            versions.
        :param hashes: An optional collection of allowed hashes.
        """
        if target_python is None:
            target_python = TargetPython()
        if specifier is None:
            # An empty SpecifierSet accepts every (non-prerelease) version.
            specifier = specifiers.SpecifierSet()

        supported_tags = target_python.get_sorted_tags()

        return cls(
            project_name=project_name,
            supported_tags=supported_tags,
            specifier=specifier,
            prefer_binary=prefer_binary,
            allow_all_prereleases=allow_all_prereleases,
            hashes=hashes,
        )

    def __init__(
        self,
        project_name: str,
        supported_tags: List[Tag],
        specifier: specifiers.BaseSpecifier,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
        hashes: Optional[Hashes] = None,
    ) -> None:
        """
        :param supported_tags: The PEP 425 tags supported by the target
            Python in order of preference (most preferred first).
        """
        self._allow_all_prereleases = allow_all_prereleases
        self._hashes = hashes
        self._prefer_binary = prefer_binary
        self._project_name = project_name
        self._specifier = specifier
        self._supported_tags = supported_tags
        # Since the index of the tag in the _supported_tags list is used
        # as a priority, precompute a map from tag to index/priority to be
        # used in wheel.find_most_preferred_tag.
        self._wheel_tag_preferences = {
            tag: idx for idx, tag in enumerate(supported_tags)
        }

    def get_applicable_candidates(
        self,
        candidates: List[InstallationCandidate],
    ) -> List[InstallationCandidate]:
        """
        Return the applicable candidates from a list of candidates.
        """
        # Using None infers from the specifier instead.
        allow_prereleases = self._allow_all_prereleases or None
        specifier = self._specifier
        versions = {
            str(v)
            for v in specifier.filter(
                # We turn the version object into a str here because otherwise
                # when we're debundled but setuptools isn't, Python will see
                # packaging.version.Version and
                # pkg_resources._vendor.packaging.version.Version as different
                # types. This way we'll use a str as a common data interchange
                # format. If we stop using the pkg_resources provided specifier
                # and start using our own, we can drop the cast to str().
                (str(c.version) for c in candidates),
                prereleases=allow_prereleases,
            )
        }

        # Again, converting version to str to deal with debundling.
        applicable_candidates = [c for c in candidates if str(c.version) in versions]

        filtered_applicable_candidates = filter_unallowed_hashes(
            candidates=applicable_candidates,
            hashes=self._hashes,
            project_name=self._project_name,
        )

        # Sorted ascending by preference: the best candidate is last.
        return sorted(filtered_applicable_candidates, key=self._sort_key)

    def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey:
        """
        Function to pass as the `key` argument to a call to sorted() to sort
        InstallationCandidates by preference.

        Returns a tuple such that tuples sorting as greater using Python's
        default comparison operator are more preferred.

        The preference is as follows:

        First and foremost, candidates with allowed (matching) hashes are
        always preferred over candidates without matching hashes. This is
        because e.g. if the only candidate with an allowed hash is yanked,
        we still want to use that candidate.

        Second, excepting hash considerations, candidates that have been
        yanked (in the sense of PEP 592) are always less preferred than
        candidates that haven't been yanked. Then:

        If not finding wheels, they are sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self._supported_tags)
          3. source archives
        If prefer_binary was set, then all wheels are sorted above sources.

        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        """
        valid_tags = self._supported_tags
        support_num = len(valid_tags)
        build_tag: BuildTag = ()
        binary_preference = 0
        link = candidate.link
        if link.is_wheel:
            # can raise InvalidWheelFilename
            wheel = Wheel(link.filename)
            try:
                # Negated so that a more-preferred (lower-index) tag yields
                # a larger sort-key component.
                pri = -(
                    wheel.find_most_preferred_tag(
                        valid_tags, self._wheel_tag_preferences
                    )
                )
            except ValueError:
                raise UnsupportedWheel(
                    f"{wheel.filename} is not a supported wheel for this platform. It "
                    "can't be sorted."
                )
            if self._prefer_binary:
                binary_preference = 1
            if wheel.build_tag is not None:
                # Split the build tag into (numeric, remainder) so it sorts
                # numerically first, then lexically.
                match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
                assert match is not None, "guaranteed by filename validation"
                build_tag_groups = match.groups()
                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
        else:  # sdist
            # Sorts below every wheel (any wheel's pri is > -support_num).
            pri = -(support_num)
        has_allowed_hash = int(link.is_hash_allowed(self._hashes))
        yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
        return (
            has_allowed_hash,
            yank_value,
            binary_preference,
            candidate.version,
            pri,
            build_tag,
        )

    def sort_best_candidate(
        self,
        candidates: List[InstallationCandidate],
    ) -> Optional[InstallationCandidate]:
        """
        Return the best candidate per the instance's sort order, or None if
        no candidate is acceptable.
        """
        if not candidates:
            return None
        best_candidate = max(candidates, key=self._sort_key)
        return best_candidate

    def compute_best_candidate(
        self,
        candidates: List[InstallationCandidate],
    ) -> BestCandidateResult:
        """
        Compute and return a `BestCandidateResult` instance.
        """
        applicable_candidates = self.get_applicable_candidates(candidates)

        best_candidate = self.sort_best_candidate(applicable_candidates)

        return BestCandidateResult(
            candidates,
            applicable_candidates=applicable_candidates,
            best_candidate=best_candidate,
        )
588
+
589
+
590
class PackageFinder:
    """This finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.
    """

    def __init__(
        self,
        link_collector: LinkCollector,
        target_python: TargetPython,
        allow_yanked: bool,
        format_control: Optional[FormatControl] = None,
        candidate_prefs: Optional[CandidatePreferences] = None,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """
        This constructor is primarily meant to be used by the create() class
        method and from tests.

        :param format_control: A FormatControl object, used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param candidate_prefs: Options to use when creating a
            CandidateEvaluator object.
        """
        if candidate_prefs is None:
            candidate_prefs = CandidatePreferences()

        format_control = format_control or FormatControl(set(), set())

        self._allow_yanked = allow_yanked
        self._candidate_prefs = candidate_prefs
        self._ignore_requires_python = ignore_requires_python
        self._link_collector = link_collector
        self._target_python = target_python

        self.format_control = format_control

        # These are boring links that have already been logged somehow.
        self._logged_links: Set[Tuple[Link, LinkType, str]] = set()

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    @classmethod
    def create(
        cls,
        link_collector: LinkCollector,
        selection_prefs: SelectionPreferences,
        target_python: Optional[TargetPython] = None,
    ) -> "PackageFinder":
        """Create a PackageFinder.

        :param selection_prefs: The candidate selection preferences, as a
            SelectionPreferences object.
        :param target_python: The target Python interpreter to use when
            checking compatibility. If None (the default), a TargetPython
            object will be constructed from the running Python.
        """
        if target_python is None:
            target_python = TargetPython()

        candidate_prefs = CandidatePreferences(
            prefer_binary=selection_prefs.prefer_binary,
            allow_all_prereleases=selection_prefs.allow_all_prereleases,
        )

        return cls(
            candidate_prefs=candidate_prefs,
            link_collector=link_collector,
            target_python=target_python,
            allow_yanked=selection_prefs.allow_yanked,
            format_control=selection_prefs.format_control,
            ignore_requires_python=selection_prefs.ignore_requires_python,
        )

    @property
    def target_python(self) -> TargetPython:
        return self._target_python

    @property
    def search_scope(self) -> SearchScope:
        return self._link_collector.search_scope

    @search_scope.setter
    def search_scope(self, search_scope: SearchScope) -> None:
        self._link_collector.search_scope = search_scope

    @property
    def find_links(self) -> List[str]:
        return self._link_collector.find_links

    @property
    def index_urls(self) -> List[str]:
        return self.search_scope.index_urls

    @property
    def trusted_hosts(self) -> Iterable[str]:
        for host_port in self._link_collector.session.pip_trusted_origins:
            yield build_netloc(*host_port)

    @property
    def allow_all_prereleases(self) -> bool:
        return self._candidate_prefs.allow_all_prereleases

    def set_allow_all_prereleases(self) -> None:
        # One-way switch: there is no corresponding "unset" method.
        self._candidate_prefs.allow_all_prereleases = True

    @property
    def prefer_binary(self) -> bool:
        return self._candidate_prefs.prefer_binary

    def set_prefer_binary(self) -> None:
        # One-way switch: there is no corresponding "unset" method.
        self._candidate_prefs.prefer_binary = True

    def requires_python_skipped_reasons(self) -> List[str]:
        """Return the distinct, sorted detail strings of links that were
        skipped due to a Requires-Python mismatch."""
        reasons = {
            detail
            for _, result, detail in self._logged_links
            if result == LinkType.requires_python_mismatch
        }
        return sorted(reasons)

    def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
        """Build a LinkEvaluator for *project_name*, applying this finder's
        format control, target Python and yanked/Requires-Python policy."""
        canonical_name = canonicalize_name(project_name)
        formats = self.format_control.get_allowed_formats(canonical_name)

        return LinkEvaluator(
            project_name=project_name,
            canonical_name=canonical_name,
            formats=formats,
            target_python=self._target_python,
            allow_yanked=self._allow_yanked,
            ignore_requires_python=self._ignore_requires_python,
        )

    def _sort_links(self, links: Iterable[Link]) -> List[Link]:
        """
        Returns elements of links in order, non-egg links first, egg links
        second, while eliminating duplicates
        """
        eggs, no_eggs = [], []
        seen: Set[Link] = set()
        for link in links:
            if link not in seen:
                seen.add(link)
                if link.egg_fragment:
                    eggs.append(link)
                else:
                    no_eggs.append(link)
        return no_eggs + eggs

    def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None:
        # Deduplicate on the full (link, result, detail) triple so repeated
        # evaluations of the same link are logged only once.
        entry = (link, result, detail)
        if entry not in self._logged_links:
            # Put the link at the end so the reason is more visible and because
            # the link string is usually very long.
            logger.debug("Skipping link: %s: %s", detail, link)
            self._logged_links.add(entry)

    def get_install_candidate(
        self, link_evaluator: LinkEvaluator, link: Link
    ) -> Optional[InstallationCandidate]:
        """
        If the link is a candidate for install, convert it to an
        InstallationCandidate and return it. Otherwise, return None.
        """
        result, detail = link_evaluator.evaluate_link(link)
        if result != LinkType.candidate:
            self._log_skipped_link(link, result, detail)
            return None

        # For a candidate, *detail* is the version string.
        return InstallationCandidate(
            name=link_evaluator.project_name,
            link=link,
            version=detail,
        )

    def evaluate_links(
        self, link_evaluator: LinkEvaluator, links: Iterable[Link]
    ) -> List[InstallationCandidate]:
        """
        Convert links that are candidates to InstallationCandidate objects.
        """
        candidates = []
        for link in self._sort_links(links):
            candidate = self.get_install_candidate(link_evaluator, link)
            if candidate is not None:
                candidates.append(candidate)

        return candidates

    def process_project_url(
        self, project_url: Link, link_evaluator: LinkEvaluator
    ) -> List[InstallationCandidate]:
        """Fetch one index page and return its installable candidates.

        Returns an empty list when the page cannot be fetched.
        """
        logger.debug(
            "Fetching project page and analyzing links: %s",
            project_url,
        )
        index_response = self._link_collector.fetch_response(project_url)
        if index_response is None:
            return []

        page_links = list(parse_links(index_response))

        with indent_log():
            package_links = self.evaluate_links(
                link_evaluator,
                links=page_links,
            )

        return package_links

    # NOTE(review): lru_cache on an instance method keeps each PackageFinder
    # alive for the cache's lifetime and caches per-instance results keyed on
    # self — presumably acceptable because a finder lives for the whole pip
    # invocation; verify that assumption holds for all callers.
    @functools.lru_cache(maxsize=None)
    def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
        """Find all available InstallationCandidate for project_name

        This checks index_urls and find_links.
        All versions found are returned as an InstallationCandidate list.

        See LinkEvaluator.evaluate_link() for details on which files
        are accepted.
        """
        link_evaluator = self.make_link_evaluator(project_name)

        collected_sources = self._link_collector.collect_sources(
            project_name=project_name,
            candidates_from_page=functools.partial(
                self.process_project_url,
                link_evaluator=link_evaluator,
            ),
        )

        page_candidates_it = itertools.chain.from_iterable(
            source.page_candidates()
            for sources in collected_sources
            for source in sources
            if source is not None
        )
        page_candidates = list(page_candidates_it)

        file_links_it = itertools.chain.from_iterable(
            source.file_links()
            for sources in collected_sources
            for source in sources
            if source is not None
        )
        file_candidates = self.evaluate_links(
            link_evaluator,
            sorted(file_links_it, reverse=True),
        )

        if logger.isEnabledFor(logging.DEBUG) and file_candidates:
            paths = []
            for candidate in file_candidates:
                assert candidate.link.url  # we need to have a URL
                try:
                    paths.append(candidate.link.file_path)
                except Exception:
                    paths.append(candidate.link.url)  # it's not a local file

            logger.debug("Local files found: %s", ", ".join(paths))

        # This is an intentional priority ordering
        return file_candidates + page_candidates

    def make_candidate_evaluator(
        self,
        project_name: str,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> CandidateEvaluator:
        """Create a CandidateEvaluator object to use."""
        candidate_prefs = self._candidate_prefs
        return CandidateEvaluator.create(
            project_name=project_name,
            target_python=self._target_python,
            prefer_binary=candidate_prefs.prefer_binary,
            allow_all_prereleases=candidate_prefs.allow_all_prereleases,
            specifier=specifier,
            hashes=hashes,
        )

    # NOTE(review): cached like find_all_candidates — same self-keyed
    # lru_cache caveat applies.
    @functools.lru_cache(maxsize=None)
    def find_best_candidate(
        self,
        project_name: str,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> BestCandidateResult:
        """Find matches for the given project and specifier.

        :param specifier: An optional object implementing `filter`
            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
            versions.

        :return: A `BestCandidateResult` instance.
        """
        candidates = self.find_all_candidates(project_name)
        candidate_evaluator = self.make_candidate_evaluator(
            project_name=project_name,
            specifier=specifier,
            hashes=hashes,
        )
        return candidate_evaluator.compute_best_candidate(candidates)

    def find_requirement(
        self, req: InstallRequirement, upgrade: bool
    ) -> Optional[InstallationCandidate]:
        """Try to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a InstallationCandidate if found,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        """
        hashes = req.hashes(trust_internet=False)
        best_candidate_result = self.find_best_candidate(
            req.name,
            specifier=req.specifier,
            hashes=hashes,
        )
        best_candidate = best_candidate_result.best_candidate

        installed_version: Optional[_BaseVersion] = None
        if req.satisfied_by is not None:
            installed_version = req.satisfied_by.version

        def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
            # This repeated parse_version and str() conversion is needed to
            # handle different vendoring sources from pip and pkg_resources.
            # If we stop using the pkg_resources provided specifier and start
            # using our own, we can drop the cast to str().
            return (
                ", ".join(
                    sorted(
                        {str(c.version) for c in cand_iter},
                        key=parse_version,
                    )
                )
                or "none"
            )

        if installed_version is None and best_candidate is None:
            logger.critical(
                "Could not find a version that satisfies the requirement %s "
                "(from versions: %s)",
                req,
                _format_versions(best_candidate_result.iter_all()),
            )

            raise DistributionNotFound(f"No matching distribution found for {req}")

        def _should_install_candidate(
            candidate: Optional[InstallationCandidate],
        ) -> "TypeGuard[InstallationCandidate]":
            # True when the candidate should replace what is installed
            # (nothing installed, or the candidate is strictly newer).
            if installed_version is None:
                return True
            if best_candidate is None:
                return False
            return best_candidate.version > installed_version

        if not upgrade and installed_version is not None:
            if _should_install_candidate(best_candidate):
                logger.debug(
                    "Existing installed version (%s) satisfies requirement "
                    "(most up-to-date version is %s)",
                    installed_version,
                    best_candidate.version,
                )
            else:
                logger.debug(
                    "Existing installed version (%s) is most up-to-date and "
                    "satisfies requirement",
                    installed_version,
                )
            # Not upgrading: keep the installed version.
            return None

        if _should_install_candidate(best_candidate):
            logger.debug(
                "Using version %s (newest of versions: %s)",
                best_candidate.version,
                _format_versions(best_candidate_result.iter_applicable()),
            )
            return best_candidate

        # We have an existing version, and its the best version
        logger.debug(
            "Installed version (%s) is most up-to-date (past versions: %s)",
            installed_version,
            _format_versions(best_candidate_result.iter_applicable()),
        )
        raise BestVersionAlreadyInstalled
984
+
985
+
986
def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
    """Find the separator's index based on the package's canonical name.

    :param fragment: A <package>+<version> filename "fragment" (stem) or
        egg fragment.
    :param canonical_name: The package's canonical name.

    This function is needed since the canonicalized name does not necessarily
    have the same length as the egg info's name part. An example::

    >>> fragment = 'foo__bar-1.0'
    >>> canonical_name = 'foo-bar'
    >>> _find_name_version_sep(fragment, canonical_name)
    8
    """
    # The name and version parts are separated by exactly one dash; try
    # every dash position and accept the first one where everything before
    # it canonicalizes to the expected project name.
    dash_positions = (i for i, ch in enumerate(fragment) if ch == "-")
    for pos in dash_positions:
        if canonicalize_name(fragment[:pos]) == canonical_name:
            return pos
    raise ValueError(f"{fragment} does not match {canonical_name}")
1010
+
1011
+
1012
def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]:
    """Parse the version string from a <package>+<version> filename
    "fragment" (stem) or egg fragment.

    Returns None when the fragment does not belong to *canonical_name*
    or carries no version part.

    :param fragment: The string to parse. E.g. foo-2.1
    :param canonical_name: The canonicalized name of the package this
        belongs to.
    """
    try:
        sep_index = _find_name_version_sep(fragment, canonical_name)
    except ValueError:
        return None
    # Everything after the separating dash is the version; an empty
    # remainder means there was no version at all.
    version = fragment[sep_index + 1 :]
    return version or None
.venv/lib/python3.11/site-packages/pip/_internal/index/sources.py ADDED
@@ -0,0 +1,285 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import mimetypes
3
+ import os
4
+ from collections import defaultdict
5
+ from typing import Callable, Dict, Iterable, List, Optional, Tuple
6
+
7
+ from pip._vendor.packaging.utils import (
8
+ InvalidSdistFilename,
9
+ InvalidVersion,
10
+ InvalidWheelFilename,
11
+ canonicalize_name,
12
+ parse_sdist_filename,
13
+ parse_wheel_filename,
14
+ )
15
+
16
+ from pip._internal.models.candidate import InstallationCandidate
17
+ from pip._internal.models.link import Link
18
+ from pip._internal.utils.urls import path_to_url, url_to_path
19
+ from pip._internal.vcs import is_url
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+ FoundCandidates = Iterable[InstallationCandidate]
24
+ FoundLinks = Iterable[Link]
25
+ CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
26
+ PageValidator = Callable[[Link], bool]
27
+
28
+
29
class LinkSource:
    # Abstract interface for one place candidate links can come from: a
    # find-links directory, a single local file, or a remote index page.
    # Concrete implementations are the _*Source classes below.

    @property
    def link(self) -> Optional[Link]:
        """Returns the underlying link, if there's one."""
        raise NotImplementedError()

    def page_candidates(self) -> FoundCandidates:
        """Candidates found by parsing an archive listing HTML file."""
        raise NotImplementedError()

    def file_links(self) -> FoundLinks:
        """Links found by specifying archives directly."""
        raise NotImplementedError()
42
+
43
+
44
+ def _is_html_file(file_url: str) -> bool:
45
+ return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"
46
+
47
+
48
class _FlatDirectoryToUrls:
    """Scans directory and caches results"""

    def __init__(self, path: str) -> None:
        # Directory to scan (a --find-links directory).
        self._path = path
        # URLs of HTML files found in the directory (index-style pages).
        self._page_candidates: List[str] = []
        # Project name (as parsed from the filename) -> URLs of its
        # wheel/sdist files.
        self._project_name_to_urls: Dict[str, List[str]] = defaultdict(list)
        # The scan is performed lazily, at most once, on first access.
        self._scanned_directory = False

    def _scan_directory(self) -> None:
        """Scans directory once and populates both page_candidates
        and project_name_to_urls at the same time
        """
        for entry in os.scandir(self._path):
            url = path_to_url(entry.path)
            if _is_html_file(url):
                self._page_candidates.append(url)
                continue

            # File must have a valid wheel or sdist name,
            # otherwise not worth considering as a package
            try:
                project_filename = parse_wheel_filename(entry.name)[0]
            except (InvalidWheelFilename, InvalidVersion):
                # Not a wheel; fall back to trying it as an sdist.
                try:
                    project_filename = parse_sdist_filename(entry.name)[0]
                except (InvalidSdistFilename, InvalidVersion):
                    continue

            self._project_name_to_urls[project_filename].append(url)
        self._scanned_directory = True

    @property
    def page_candidates(self) -> List[str]:
        # Scan lazily so constructing the object stays cheap.
        if not self._scanned_directory:
            self._scan_directory()

        return self._page_candidates

    @property
    def project_name_to_urls(self) -> Dict[str, List[str]]:
        # Same lazy scan as page_candidates; one scan serves both.
        if not self._scanned_directory:
            self._scan_directory()

        return self._project_name_to_urls
93
+
94
+
95
class _FlatDirectorySource(LinkSource):
    """Link source specified by ``--find-links=<path-to-dir>``.

    This looks the content of the directory, and returns:

    * ``page_candidates``: Links listed on each HTML file in the directory.
    * ``file_candidates``: Archives in the directory.
    """

    # Class-level cache: one directory scan is shared by every
    # _FlatDirectorySource created for the same path (e.g. when resolving
    # several projects against the same --find-links directory).
    _paths_to_urls: Dict[str, _FlatDirectoryToUrls] = {}

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        path: str,
        project_name: str,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._project_name = canonicalize_name(project_name)

        # Get existing instance of _FlatDirectoryToUrls if it exists
        if path in self._paths_to_urls:
            self._path_to_urls = self._paths_to_urls[path]
        else:
            self._path_to_urls = _FlatDirectoryToUrls(path=path)
            self._paths_to_urls[path] = self._path_to_urls

    @property
    def link(self) -> Optional[Link]:
        # A directory has no single underlying link.
        return None

    def page_candidates(self) -> FoundCandidates:
        # Parse each HTML file found in the directory for candidates.
        for url in self._path_to_urls.page_candidates:
            yield from self._candidates_from_page(Link(url))

    def file_links(self) -> FoundLinks:
        # Only archives whose filename matched this project are returned.
        for url in self._path_to_urls.project_name_to_urls[self._project_name]:
            yield Link(url)
133
+
134
+
135
class _LocalFileSource(LinkSource):
    """``--find-links=<path-or-url>`` or ``--[extra-]index-url=<path-or-url>``.

    If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
    the option, it is converted to a URL first. This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Only an HTML file can be parsed for candidate listings.
        if _is_html_file(self._link.url):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # A non-HTML file is itself the single candidate archive.
        if not _is_html_file(self._link.url):
            yield self._link
166
+
167
+
168
class _RemoteFileSource(LinkSource):
    """``--find-links=<url>`` or ``--[extra-]index-url=<url>``.

    This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        page_validator: PageValidator,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._page_validator = page_validator
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Only pages the validator accepts are parsed for candidates.
        if self._page_validator(self._link):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # The remote URL itself is always offered as a direct file link.
        yield self._link
198
+
199
+
200
class _IndexDirectorySource(LinkSource):
    """``--[extra-]index-url=<path-to-directory>``.

    This is treated like a remote URL; ``candidates_from_page`` contains logic
    for this by appending ``index.html`` to the link.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # An index directory never offers direct file links.
        return ()
224
+
225
+
226
def build_source(
    location: str,
    *,
    candidates_from_page: CandidatesFromPage,
    page_validator: PageValidator,
    expand_dir: bool,
    cache_link_parsing: bool,
    project_name: str,
) -> Tuple[Optional[str], Optional[LinkSource]]:
    """Classify *location* and build the matching LinkSource.

    :param location: A --find-links / --index-url value: a local path, a
        ``file:`` URL, or a remote URL.
    :param expand_dir: When the location is a directory, True selects
        find-links semantics (_FlatDirectorySource), False selects
        index semantics (_IndexDirectorySource).
    :return: ``(url, source)``; either element is None when the location
        could not be interpreted (a warning is logged in that case).
    """
    # Resolve location into a (path, url) pair. Existing paths win over
    # URL parsing so a relative path is not mistaken for a scheme-less URL.
    path: Optional[str] = None
    url: Optional[str] = None
    if os.path.exists(location):  # Is a local path.
        url = path_to_url(location)
        path = location
    elif location.startswith("file:"):  # A file: URL.
        url = location
        path = url_to_path(location)
    elif is_url(location):
        url = location

    if url is None:
        msg = (
            "Location '%s' is ignored: "
            "it is either a non-existing path or lacks a specific scheme."
        )
        logger.warning(msg, location)
        return (None, None)

    # No local path: the location is remote.
    if path is None:
        source: LinkSource = _RemoteFileSource(
            candidates_from_page=candidates_from_page,
            page_validator=page_validator,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, source)

    if os.path.isdir(path):
        if expand_dir:
            source = _FlatDirectorySource(
                candidates_from_page=candidates_from_page,
                path=path,
                project_name=project_name,
            )
        else:
            source = _IndexDirectorySource(
                candidates_from_page=candidates_from_page,
                link=Link(url, cache_link_parsing=cache_link_parsing),
            )
        return (url, source)
    elif os.path.isfile(path):
        source = _LocalFileSource(
            candidates_from_page=candidates_from_page,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, source)
    # Path exists but is neither a file nor a directory (e.g. a socket).
    logger.warning(
        "Location '%s' is ignored: it is neither a file nor a directory.",
        location,
    )
    return (url, None)
.venv/lib/python3.11/site-packages/pip/_internal/main.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import List, Optional
2
+
3
+
4
def main(args: Optional[List[str]] = None) -> int:
    """This is preserved for old console scripts that may still be referencing
    it.

    For additional details, see https://github.com/pypa/pip/issues/7498.

    :param args: Command-line arguments; None means use sys.argv.
    :return: The process exit code from the wrapped pip entry point.
    """
    # Imported lazily so merely importing this module stays cheap and avoids
    # import cycles with the rest of pip._internal.
    from pip._internal.utils.entrypoints import _wrapper

    return _wrapper(args)
.venv/lib/python3.11/site-packages/pip/_internal/operations/__init__.py ADDED
File without changes
.venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (197 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc ADDED
Binary file (8.46 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-311.pyc ADDED
Binary file (11.6 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc ADDED
Binary file (27.8 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/__init__.py ADDED
File without changes
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (203 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc ADDED
Binary file (8.92 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc ADDED
Binary file (2.27 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc ADDED
Binary file (2.31 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc ADDED
Binary file (3.71 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc ADDED
Binary file (1.94 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc ADDED
Binary file (2.38 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc ADDED
Binary file (4.49 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/build_tracker.py ADDED
@@ -0,0 +1,139 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+ import hashlib
3
+ import logging
4
+ import os
5
+ from types import TracebackType
6
+ from typing import Dict, Generator, Optional, Set, Type, Union
7
+
8
+ from pip._internal.models.link import Link
9
+ from pip._internal.req.req_install import InstallRequirement
10
+ from pip._internal.utils.temp_dir import TempDirectory
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
@contextlib.contextmanager
def update_env_context_manager(**changes: str) -> Generator[None, None, None]:
    """Temporarily apply *changes* to ``os.environ``, restoring the previous
    values (or absence) of every touched variable on exit."""
    target = os.environ

    # Remember each variable's prior value -- or a sentinel when it did not
    # exist -- before overwriting it.
    _absent = object()
    previous: Dict[str, Union[object, str]] = {
        name: target.get(name, _absent) for name in changes
    }
    target.update(changes)

    try:
        yield
    finally:
        # Put the environment back exactly as it was.
        for name, old_value in previous.items():
            if old_value is _absent:
                del target[name]
            else:
                assert isinstance(old_value, str)  # for mypy
                target[name] = old_value
39
+
40
+
41
@contextlib.contextmanager
def get_build_tracker() -> Generator["BuildTracker", None, None]:
    """Yield a BuildTracker rooted at $PIP_BUILD_TRACKER.

    When the variable is unset (the top-level pip process), a temporary root
    is created and exported so child pip invocations share the same tracker.
    """
    root = os.environ.get("PIP_BUILD_TRACKER")
    with contextlib.ExitStack() as ctx:
        if root is None:
            # First pip process: create the tracker directory and advertise
            # it to subprocesses for the duration of this context.
            root = ctx.enter_context(TempDirectory(kind="build-tracker")).path
            ctx.enter_context(update_env_context_manager(PIP_BUILD_TRACKER=root))
            logger.debug("Initialized build tracking at %s", root)

        with BuildTracker(root) as tracker:
            yield tracker
52
+
53
+
54
class TrackerId(str):
    """Uniquely identifying string provided to the build tracker.

    A plain ``str`` subclass used purely as a type-level marker so tracker
    keys are not confused with arbitrary strings.
    """
56
+
57
+
58
class BuildTracker:
    """Ensure that an sdist cannot request itself as a setup requirement.

    When an sdist is prepared, it identifies its setup requirements in the
    context of ``BuildTracker.track()``. If a requirement shows up recursively, this
    raises an exception.

    This stops fork bombs embedded in malicious packages."""

    def __init__(self, root: str) -> None:
        # Directory holding one marker file per in-progress build; shared
        # across pip subprocesses via $PIP_BUILD_TRACKER.
        self._root = root
        # In-memory record of builds added by *this* process, keyed by
        # TrackerId; used for cleanup on exit.
        self._entries: Dict[TrackerId, InstallRequirement] = {}
        logger.debug("Created build tracker: %s", self._root)

    def __enter__(self) -> "BuildTracker":
        logger.debug("Entered build tracker: %s", self._root)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Always clear our entries, even on error, so stale marker files
        # don't block future builds.
        self.cleanup()

    def _entry_path(self, key: TrackerId) -> str:
        """Return the marker-file path for *key* (hashed to a safe filename)."""
        hashed = hashlib.sha224(key.encode()).hexdigest()
        return os.path.join(self._root, hashed)

    def add(self, req: InstallRequirement, key: TrackerId) -> None:
        """Add an InstallRequirement to build tracking."""

        # Get the file to write information about this requirement.
        entry_path = self._entry_path(key)

        # Try reading from the file. If it exists and can be read from, a build
        # is already in progress, so a LookupError is raised.
        try:
            with open(entry_path) as fp:
                contents = fp.read()
        except FileNotFoundError:
            pass
        else:
            message = "{} is already being built: {}".format(req.link, contents)
            raise LookupError(message)

        # If we're here, req should really not be building already.
        assert key not in self._entries

        # Start tracking this requirement.
        with open(entry_path, "w", encoding="utf-8") as fp:
            fp.write(str(req))
        self._entries[key] = req

        logger.debug("Added %s to build tracker %r", req, self._root)

    def remove(self, req: InstallRequirement, key: TrackerId) -> None:
        """Remove an InstallRequirement from build tracking."""

        # Delete the created file and the corresponding entry.
        os.unlink(self._entry_path(key))
        del self._entries[key]

        logger.debug("Removed %s from build tracker %r", req, self._root)

    def cleanup(self) -> None:
        # Copy to a list since remove() mutates self._entries while we iterate.
        for key, req in list(self._entries.items()):
            self.remove(req, key)

        logger.debug("Removed build tracker: %r", self._root)

    @contextlib.contextmanager
    def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]:
        """Ensure that `key` cannot install itself as a setup requirement.

        :raises LookupError: If `key` was already provided in a parent invocation of
            the context introduced by this method."""
        tracker_id = TrackerId(key)
        self.add(req, tracker_id)
        yield
        self.remove(req, tracker_id)
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Metadata generation logic for source distributions.
2
+ """
3
+
4
+ import os
5
+
6
+ from pip._vendor.pyproject_hooks import BuildBackendHookCaller
7
+
8
+ from pip._internal.build_env import BuildEnvironment
9
+ from pip._internal.exceptions import (
10
+ InstallationSubprocessError,
11
+ MetadataGenerationFailed,
12
+ )
13
+ from pip._internal.utils.subprocess import runner_with_spinner_message
14
+ from pip._internal.utils.temp_dir import TempDirectory
15
+
16
+
17
def generate_metadata(
    build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 517.

    :param build_env: Build environment to enter while calling the backend.
    :param backend: The PEP 517 build-backend hook caller.
    :param details: Human-readable package description for error reporting.
    :raises MetadataGenerationFailed: If the backend subprocess fails.

    Returns the generated metadata directory.
    """
    # globally_managed: pip cleans this directory up at process exit, so the
    # returned path stays valid for the rest of the run.
    metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)

    metadata_dir = metadata_tmpdir.path

    with build_env:
        # Note that BuildBackendHookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so we don't have to
        # consider the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")
        with backend.subprocess_runner(runner):
            try:
                distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    # The hook returns the .dist-info directory name relative to metadata_dir.
    return os.path.join(metadata_dir, distinfo_dir)
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata_editable.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Metadata generation logic for source distributions.
2
+ """
3
+
4
+ import os
5
+
6
+ from pip._vendor.pyproject_hooks import BuildBackendHookCaller
7
+
8
+ from pip._internal.build_env import BuildEnvironment
9
+ from pip._internal.exceptions import (
10
+ InstallationSubprocessError,
11
+ MetadataGenerationFailed,
12
+ )
13
+ from pip._internal.utils.subprocess import runner_with_spinner_message
14
+ from pip._internal.utils.temp_dir import TempDirectory
15
+
16
+
17
def generate_editable_metadata(
    build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 660.

    :param build_env: Build environment to enter while calling the backend.
    :param backend: The PEP 517/660 build-backend hook caller.
    :param details: Human-readable package description for error reporting.
    :raises MetadataGenerationFailed: If the backend subprocess fails.

    Returns the generated metadata directory.
    """
    # globally_managed: pip cleans this directory up at process exit, so the
    # returned path stays valid for the rest of the run.
    metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)

    metadata_dir = metadata_tmpdir.path

    with build_env:
        # Note that BuildBackendHookCaller implements a fallback for
        # prepare_metadata_for_build_wheel/editable, so we don't have to
        # consider the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message(
            "Preparing editable metadata (pyproject.toml)"
        )
        with backend.subprocess_runner(runner):
            try:
                distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir)
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    # The hook returns the .dist-info directory name relative to metadata_dir.
    return os.path.join(metadata_dir, distinfo_dir)
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata_legacy.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Metadata generation logic for legacy source distributions.
2
+ """
3
+
4
+ import logging
5
+ import os
6
+
7
+ from pip._internal.build_env import BuildEnvironment
8
+ from pip._internal.cli.spinners import open_spinner
9
+ from pip._internal.exceptions import (
10
+ InstallationError,
11
+ InstallationSubprocessError,
12
+ MetadataGenerationFailed,
13
+ )
14
+ from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
15
+ from pip._internal.utils.subprocess import call_subprocess
16
+ from pip._internal.utils.temp_dir import TempDirectory
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
+ def _find_egg_info(directory: str) -> str:
22
+ """Find an .egg-info subdirectory in `directory`."""
23
+ filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")]
24
+
25
+ if not filenames:
26
+ raise InstallationError(f"No .egg-info directory found in {directory}")
27
+
28
+ if len(filenames) > 1:
29
+ raise InstallationError(
30
+ "More than one .egg-info directory found in {}".format(directory)
31
+ )
32
+
33
+ return os.path.join(directory, filenames[0])
34
+
35
+
36
def generate_metadata(
    build_env: BuildEnvironment,
    setup_py_path: str,
    source_dir: str,
    isolated: bool,
    details: str,
) -> str:
    """Generate metadata using setup.py-based defacto mechanisms.

    :param build_env: Build environment to enter while running setup.py.
    :param setup_py_path: Path to the project's setup.py.
    :param source_dir: Directory to run the egg_info command from.
    :param isolated: When True, suppress the user's setuptools config.
    :param details: Human-readable package description for error reporting.
    :raises MetadataGenerationFailed: If the egg_info subprocess fails.

    Returns the generated metadata directory.
    """
    logger.debug(
        "Running setup.py (path:%s) egg_info for package %s",
        setup_py_path,
        details,
    )

    # globally_managed: pip cleans this up at process exit, so the returned
    # path stays valid for the rest of the run.
    egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path

    args = make_setuptools_egg_info_args(
        setup_py_path,
        egg_info_dir=egg_info_dir,
        no_user_config=isolated,
    )

    with build_env:
        with open_spinner("Preparing metadata (setup.py)") as spinner:
            try:
                call_subprocess(
                    args,
                    cwd=source_dir,
                    command_desc="python setup.py egg_info",
                    spinner=spinner,
                )
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    # Return the .egg-info directory.
    return _find_egg_info(egg_info_dir)
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ from typing import Optional
4
+
5
+ from pip._vendor.pyproject_hooks import BuildBackendHookCaller
6
+
7
+ from pip._internal.utils.subprocess import runner_with_spinner_message
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
def build_wheel_pep517(
    name: str,
    backend: BuildBackendHookCaller,
    metadata_directory: str,
    tempd: str,
) -> Optional[str]:
    """Build one InstallRequirement using the PEP 517 build process.

    :param name: Project name, used only for logging.
    :param backend: The PEP 517 build-backend hook caller.
    :param metadata_directory: Previously-generated metadata directory passed
        through to the backend so it can skip regenerating metadata.
    :param tempd: Directory the backend should place the built wheel in.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    try:
        logger.debug("Destination directory: %s", tempd)

        runner = runner_with_spinner_message(
            f"Building wheel for {name} (pyproject.toml)"
        )
        with backend.subprocess_runner(runner):
            # build_wheel returns the wheel's filename relative to tempd.
            wheel_name = backend.build_wheel(
                tempd,
                metadata_directory=metadata_directory,
            )
    except Exception:
        # Any backend failure is reported but not propagated; the caller
        # treats None as "build failed".
        logger.error("Failed building wheel for %s", name)
        return None
    return os.path.join(tempd, wheel_name)
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel_editable.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ from typing import Optional
4
+
5
+ from pip._vendor.pyproject_hooks import BuildBackendHookCaller, HookMissing
6
+
7
+ from pip._internal.utils.subprocess import runner_with_spinner_message
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
def build_wheel_editable(
    name: str,
    backend: BuildBackendHookCaller,
    metadata_directory: str,
    tempd: str,
) -> Optional[str]:
    """Build one InstallRequirement using the PEP 660 build process.

    :param name: Project name, used only for logging.
    :param backend: The PEP 517/660 build-backend hook caller.
    :param metadata_directory: Previously-generated metadata directory passed
        through to the backend so it can skip regenerating metadata.
    :param tempd: Directory the backend should place the built wheel in.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    try:
        logger.debug("Destination directory: %s", tempd)

        runner = runner_with_spinner_message(
            f"Building editable for {name} (pyproject.toml)"
        )
        with backend.subprocess_runner(runner):
            try:
                # build_editable returns the wheel's filename relative to tempd.
                wheel_name = backend.build_editable(
                    tempd,
                    metadata_directory=metadata_directory,
                )
            except HookMissing as e:
                # PEP 660 hooks are optional; report which hook is absent.
                logger.error(
                    "Cannot build editable %s because the build "
                    "backend does not have the %s hook",
                    name,
                    e,
                )
                return None
    except Exception:
        # Any other backend failure is reported but not propagated; the
        # caller treats None as "build failed".
        logger.error("Failed building editable for %s", name)
        return None
    return os.path.join(tempd, wheel_name)
.venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel_legacy.py ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os.path
3
+ from typing import List, Optional
4
+
5
+ from pip._internal.cli.spinners import open_spinner
6
+ from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
7
+ from pip._internal.utils.subprocess import call_subprocess, format_command_args
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
def format_command_result(
    command_args: List[str],
    command_output: str,
) -> str:
    """Format command information for logging."""
    command_desc = format_command_args(command_args)
    parts = [f"Command arguments: {command_desc}\n"]

    if not command_output:
        parts.append("Command output: None")
    elif logger.getEffectiveLevel() > logging.DEBUG:
        # Full output is only shown at debug verbosity.
        parts.append("Command output: [use --verbose to show]")
    else:
        output = command_output
        if not output.endswith("\n"):
            output += "\n"
        parts.append(f"Command output:\n{output}")

    return "".join(parts)
30
+
31
+
32
def get_legacy_build_wheel_path(
    names: List[str],
    temp_dir: str,
    name: str,
    command_args: List[str],
    command_output: str,
) -> Optional[str]:
    """Return the path to the wheel in the temporary build directory."""
    # Sort for determinism.
    names = sorted(names)

    if not names:
        msg = f"Legacy build of wheel for {name!r} created no files.\n"
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)
        return None

    if len(names) > 1:
        # Ambiguous output: warn, then fall through and pick the first file.
        msg = (
            f"Legacy build of wheel for {name!r} created more than one file.\n"
            f"Filenames (choosing first): {names}\n"
        )
        msg += format_command_result(command_args, command_output)
        logger.warning(msg)

    return os.path.join(temp_dir, names[0])
57
+
58
+
59
def build_wheel_legacy(
    name: str,
    setup_py_path: str,
    source_dir: str,
    global_options: List[str],
    build_options: List[str],
    tempd: str,
) -> Optional[str]:
    """Build one unpacked package using the "legacy" build process.

    :param name: Project name, used for logging and the spinner message.
    :param setup_py_path: Path to the project's setup.py.
    :param source_dir: Directory to run the bdist_wheel command from.
    :param global_options: Options placed before the bdist_wheel command.
    :param build_options: Options placed after the bdist_wheel command.
    :param tempd: Destination directory for the built wheel.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    wheel_args = make_setuptools_bdist_wheel_args(
        setup_py_path,
        global_options=global_options,
        build_options=build_options,
        destination_dir=tempd,
    )

    spin_message = f"Building wheel for {name} (setup.py)"
    with open_spinner(spin_message) as spinner:
        logger.debug("Destination directory: %s", tempd)

        try:
            output = call_subprocess(
                wheel_args,
                command_desc="python setup.py bdist_wheel",
                cwd=source_dir,
                spinner=spinner,
            )
        except Exception:
            # Build failure is reported but not propagated; the caller
            # treats None as "build failed".
            spinner.finish("error")
            logger.error("Failed building wheel for %s", name)
            return None

    # The command only reports success; locate the wheel it produced.
    names = os.listdir(tempd)
    wheel_path = get_legacy_build_wheel_path(
        names=names,
        temp_dir=tempd,
        name=name,
        command_args=wheel_args,
        command_output=output,
    )
    return wheel_path
.venv/lib/python3.11/site-packages/pip/_internal/operations/check.py ADDED
@@ -0,0 +1,187 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Validation of dependencies of packages
2
+ """
3
+
4
+ import logging
5
+ from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple
6
+
7
+ from pip._vendor.packaging.requirements import Requirement
8
+ from pip._vendor.packaging.specifiers import LegacySpecifier
9
+ from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
10
+ from pip._vendor.packaging.version import LegacyVersion
11
+
12
+ from pip._internal.distributions import make_distribution_for_install_requirement
13
+ from pip._internal.metadata import get_default_environment
14
+ from pip._internal.metadata.base import DistributionVersion
15
+ from pip._internal.req.req_install import InstallRequirement
16
+ from pip._internal.utils.deprecation import deprecated
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
class PackageDetails(NamedTuple):
    # Installed version of the distribution.
    version: DistributionVersion
    # The distribution's declared requirements.
    dependencies: List[Requirement]
24
+
25
+
26
+ # Shorthands
27
+ PackageSet = Dict[NormalizedName, PackageDetails]
28
+ Missing = Tuple[NormalizedName, Requirement]
29
+ Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement]
30
+
31
+ MissingDict = Dict[NormalizedName, List[Missing]]
32
+ ConflictingDict = Dict[NormalizedName, List[Conflicting]]
33
+ CheckResult = Tuple[MissingDict, ConflictingDict]
34
+ ConflictDetails = Tuple[PackageSet, CheckResult]
35
+
36
+
37
def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
    """Converts a list of distributions into a PackageSet.

    :return: ``(package_set, problems)`` where *problems* is True when at
        least one installed distribution's metadata could not be parsed
        (such distributions are omitted from the set).
    """
    package_set = {}
    problems = False
    env = get_default_environment()
    for dist in env.iter_installed_distributions(local_only=False, skip=()):
        name = dist.canonical_name
        try:
            dependencies = list(dist.iter_dependencies())
            package_set[name] = PackageDetails(dist.version, dependencies)
        except (OSError, ValueError) as e:
            # Don't crash on unreadable or broken metadata.
            logger.warning("Error parsing requirements for %s: %s", name, e)
            problems = True
    return package_set, problems
52
+
53
+
54
def check_package_set(
    package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
) -> CheckResult:
    """Check if a package set is consistent

    If should_ignore is passed, it should be a callable that takes a
    package name and returns a boolean.

    :return: ``(missing, conflicting)`` dicts mapping each problematic
        package name to its missing / version-conflicting requirements.
    """

    # Emit deprecation warnings for non-PEP 440/508 versions and specifiers.
    warn_legacy_versions_and_specifiers(package_set)

    missing = {}
    conflicting = {}

    for package_name, package_detail in package_set.items():
        # Info about dependencies of package_name
        missing_deps: Set[Missing] = set()
        conflicting_deps: Set[Conflicting] = set()

        if should_ignore and should_ignore(package_name):
            continue

        for req in package_detail.dependencies:
            name = canonicalize_name(req.name)

            # Check if it's missing
            if name not in package_set:
                missed = True
                if req.marker is not None:
                    # Only count it as missing when the marker applies in the
                    # no-extras environment.
                    missed = req.marker.evaluate({"extra": ""})
                if missed:
                    missing_deps.add((name, req))
                continue

            # Check if there's a conflict
            version = package_set[name].version
            if not req.specifier.contains(version, prereleases=True):
                conflicting_deps.add((name, version, req))

        # Sorted for deterministic report output.
        if missing_deps:
            missing[package_name] = sorted(missing_deps, key=str)
        if conflicting_deps:
            conflicting[package_name] = sorted(conflicting_deps, key=str)

    return missing, conflicting
99
+
100
+
101
def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
    """For checking if the dependency graph would be consistent after \
    installing given requirements

    :param to_install: Requirements whose installation is simulated on top of
        the currently installed packages.
    :return: The simulated package set and the check result restricted to
        packages affected by the installation.
    """
    # Start from the current state
    package_set, _ = create_package_set_from_installed()
    # Install packages
    would_be_installed = _simulate_installation_of(to_install, package_set)

    # Only warn about directly-dependent packages; create a whitelist of them
    whitelist = _create_whitelist(would_be_installed, package_set)

    return (
        package_set,
        check_package_set(
            package_set, should_ignore=lambda name: name not in whitelist
        ),
    )
119
+
120
+
121
def _simulate_installation_of(
    to_install: List[InstallRequirement], package_set: PackageSet
) -> Set[NormalizedName]:
    """Computes the version of packages after installing to_install.

    Mutates *package_set* in place to reflect the simulated installs.

    :return: The canonical names of the packages that would be installed.
    """
    # Keep track of packages that were installed
    installed = set()

    # Modify it as installing requirement_set would (assuming no errors)
    for inst_req in to_install:
        abstract_dist = make_distribution_for_install_requirement(inst_req)
        dist = abstract_dist.get_metadata_distribution()
        name = dist.canonical_name
        # Overwrites any currently-installed entry for the same name.
        package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies()))

        installed.add(name)

    return installed
138
+
139
+
140
def _create_whitelist(
    would_be_installed: Set[NormalizedName], package_set: PackageSet
) -> Set[NormalizedName]:
    """Return the to-be-installed packages plus every package in
    *package_set* that directly depends on one of them."""
    packages_affected = set(would_be_installed)

    for package_name, details in package_set.items():
        if package_name in packages_affected:
            continue

        # One affected dependency is enough to pull the package in.
        if any(
            canonicalize_name(req.name) in packages_affected
            for req in details.dependencies
        ):
            packages_affected.add(package_name)

    return packages_affected
155
+
156
+
157
def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None:
    """Emit deprecation warnings for installed distributions whose version
    or dependency specifiers do not conform to PEP 440 / PEP 508."""
    for project_name, package_details in package_set.items():
        # Non-PEP 440 version numbers.
        if isinstance(package_details.version, LegacyVersion):
            deprecated(
                reason=(
                    f"{project_name} {package_details.version} "
                    f"has a non-standard version number."
                ),
                replacement=(
                    f"to upgrade to a newer version of {project_name} "
                    f"or contact the author to suggest that they "
                    f"release a version with a conforming version number"
                ),
                issue=12063,
                gone_in="24.1",
            )
        # Non-PEP 440 specifiers inside the declared dependencies.
        for dep in package_details.dependencies:
            if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
                deprecated(
                    reason=(
                        f"{project_name} {package_details.version} "
                        f"has a non-standard dependency specifier {dep}."
                    ),
                    replacement=(
                        f"to upgrade to a newer version of {project_name} "
                        f"or contact the author to suggest that they "
                        f"release a version with a conforming dependency specifiers"
                    ),
                    issue=12063,
                    gone_in="24.1",
                )
.venv/lib/python3.11/site-packages/pip/_internal/operations/freeze.py ADDED
@@ -0,0 +1,255 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import collections
2
+ import logging
3
+ import os
4
+ from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set
5
+
6
+ from pip._vendor.packaging.utils import canonicalize_name
7
+ from pip._vendor.packaging.version import Version
8
+
9
+ from pip._internal.exceptions import BadCommand, InstallationError
10
+ from pip._internal.metadata import BaseDistribution, get_environment
11
+ from pip._internal.req.constructors import (
12
+ install_req_from_editable,
13
+ install_req_from_line,
14
+ )
15
+ from pip._internal.req.req_file import COMMENT_RE
16
+ from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
+ class _EditableInfo(NamedTuple):
22
+ requirement: str
23
+ comments: List[str]
24
+
25
+
26
def freeze(
    requirement: Optional[List[str]] = None,
    local_only: bool = False,
    user_only: bool = False,
    paths: Optional[List[str]] = None,
    isolated: bool = False,
    exclude_editable: bool = False,
    skip: Container[str] = (),
) -> Generator[str, None, None]:
    """Yield the lines of ``pip freeze`` output.

    First collects every installed distribution (from ``paths``, or the
    default environment) as a FrozenRequirement keyed by canonical name.
    If ``requirement`` files are given, their lines are echoed in order:
    pass-through options are emitted once each, named requirements that are
    installed are emitted (and removed from the pending set), and the
    remainder of the installed set is emitted at the end under a
    "added by pip freeze" header.  Requirements listed in ``skip`` are
    never emitted from the installed set.
    """
    installations: Dict[str, FrozenRequirement] = {}

    # Note: ``skip`` is intentionally NOT forwarded here -- skipping is
    # applied when yielding at the end, so requirement-file matching still
    # sees every installed distribution.
    dists = get_environment(paths).iter_installed_distributions(
        local_only=local_only,
        skip=(),
        user_only=user_only,
    )
    for dist in dists:
        req = FrozenRequirement.from_dist(dist)
        if exclude_editable and req.editable:
            continue
        installations[req.canonical_name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options: Set[str] = set()
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files: Dict[str, List[str]] = collections.defaultdict(list)
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    # Blank lines, comments, and pass-through options are
                    # echoed verbatim (once each), not parsed as requirements.
                    if (
                        not line.strip()
                        or line.strip().startswith("#")
                        or line.startswith(
                            (
                                "-r",
                                "--requirement",
                                "-f",
                                "--find-links",
                                "-i",
                                "--index-url",
                                "--pre",
                                "--trusted-host",
                                "--process-dependency-links",
                                "--extra-index-url",
                                "--use-feature",
                            )
                        )
                    ):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    # Parse the line into an InstallRequirement so we can
                    # resolve its (canonical) project name.
                    if line.startswith("-e") or line.startswith("--editable"):
                        if line.startswith("-e"):
                            line = line[2:].strip()
                        else:
                            line = line[len("--editable") :].strip().lstrip("=")
                        line_req = install_req_from_editable(
                            line,
                            isolated=isolated,
                        )
                    else:
                        line_req = install_req_from_line(
                            COMMENT_RE.sub("", line).strip(),
                            isolated=isolated,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path,
                            line.strip(),
                        )
                        logger.info(
                            "  (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    else:
                        line_req_canonical_name = canonicalize_name(line_req.name)
                        if line_req_canonical_name not in installations:
                            # either it's not installed, or it is installed
                            # but has been processed already
                            if not req_files[line_req.name]:
                                logger.warning(
                                    "Requirement file [%s] contains %s, but "
                                    "package %r is not installed",
                                    req_file_path,
                                    COMMENT_RE.sub("", line).strip(),
                                    line_req.name,
                                )
                            else:
                                req_files[line_req.name].append(req_file_path)
                        else:
                            # Emit the installed version and drop it from the
                            # pending set so it is only emitted once.
                            yield str(installations[line_req_canonical_name]).rstrip()
                            del installations[line_req_canonical_name]
                            req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in req_files.items():
            if len(files) > 1:
                logger.warning(
                    "Requirement %s included multiple times [%s]",
                    name,
                    ", ".join(sorted(set(files))),
                )

        yield ("## The following requirements were added by pip freeze:")
    # Whatever was not consumed by the requirement files above, sorted
    # case-insensitively by name, minus anything the caller asked to skip.
    for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
        if installation.canonical_name not in skip:
            yield str(installation).rstrip()
145
+
146
+
147
def _format_as_name_version(dist: BaseDistribution) -> str:
    """Render *dist* as ``name==version``.

    A legacy (non PEP 440) version is pinned with the arbitrary-equality
    operator ``===`` instead of ``==``.
    """
    dist_version = dist.version
    operator = "==" if isinstance(dist_version, Version) else "==="
    return f"{dist.raw_name}{operator}{dist_version}"
152
+
153
+
154
def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
    """
    Compute and return values (req, comments) for use in
    FrozenRequirement.from_dist().

    Tries to express the editable install as a VCS src requirement; every
    failure mode falls back to the project's filesystem location plus an
    explanatory comment line.
    """
    editable_project_location = dist.editable_project_location
    assert editable_project_location
    location = os.path.normcase(os.path.abspath(editable_project_location))

    # NOTE(review): imported locally rather than at module level --
    # presumably to avoid an import cycle with pip._internal.vcs; confirm
    # against the package's import graph.
    from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs

    vcs_backend = vcs.get_backend_for_dir(location)

    if vcs_backend is None:
        # Not under any recognized version control: freeze the bare path.
        display = _format_as_name_version(dist)
        logger.debug(
            'No VCS found for editable requirement "%s" in: %r',
            display,
            location,
        )
        return _EditableInfo(
            requirement=location,
            comments=[f"# Editable install with no version control ({display})"],
        )

    vcs_name = type(vcs_backend).__name__

    try:
        req = vcs_backend.get_src_requirement(location, dist.raw_name)
    except RemoteNotFoundError:
        # Repository exists but has no remote to point a requirement at.
        display = _format_as_name_version(dist)
        return _EditableInfo(
            requirement=location,
            comments=[f"# Editable {vcs_name} install with no remote ({display})"],
        )
    except RemoteNotValidError as ex:
        # Remote exists but its URL cannot be turned into a requirement.
        display = _format_as_name_version(dist)
        return _EditableInfo(
            requirement=location,
            comments=[
                f"# Editable {vcs_name} install ({display}) with either a deleted "
                f"local remote or invalid URI:",
                f"# '{ex.url}'",
            ],
        )
    except BadCommand:
        # The VCS executable (e.g. git) is not available on PATH.
        logger.warning(
            "cannot determine version of editable source in %s "
            "(%s command not found in path)",
            location,
            vcs_backend.name,
        )
        return _EditableInfo(requirement=location, comments=[])
    except InstallationError as exc:
        logger.warning("Error when trying to get requirement for VCS system %s", exc)
    else:
        return _EditableInfo(requirement=req, comments=[])

    # Only the InstallationError path falls through to here.
    logger.warning("Could not determine repository location of %s", location)

    return _EditableInfo(
        requirement=location,
        comments=["## !! Could not determine repository location"],
    )
218
+
219
+
220
+ class FrozenRequirement:
221
+ def __init__(
222
+ self,
223
+ name: str,
224
+ req: str,
225
+ editable: bool,
226
+ comments: Iterable[str] = (),
227
+ ) -> None:
228
+ self.name = name
229
+ self.canonical_name = canonicalize_name(name)
230
+ self.req = req
231
+ self.editable = editable
232
+ self.comments = comments
233
+
234
+ @classmethod
235
+ def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
236
+ editable = dist.editable
237
+ if editable:
238
+ req, comments = _get_editable_info(dist)
239
+ else:
240
+ comments = []
241
+ direct_url = dist.direct_url
242
+ if direct_url:
243
+ # if PEP 610 metadata is present, use it
244
+ req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name)
245
+ else:
246
+ # name==version requirement
247
+ req = _format_as_name_version(dist)
248
+
249
+ return cls(dist.raw_name, req, editable, comments=comments)
250
+
251
+ def __str__(self) -> str:
252
+ req = self.req
253
+ if self.editable:
254
+ req = f"-e {req}"
255
+ return "\n".join(list(self.comments) + [str(req)]) + "\n"
.venv/lib/python3.11/site-packages/pip/_internal/operations/install/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ """For modules related to installing packages.
2
+ """
.venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (271 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc ADDED
Binary file (2.18 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc ADDED
Binary file (40.2 kB). View file