prasb committed on
Commit
0337776
·
verified ·
1 Parent(s): 68d27bf

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/__pycache__/configuration.cpython-38.pyc +0 -0
  3. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/cli/spinners.py +159 -0
  4. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/cache.py +225 -0
  5. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/check.py +54 -0
  6. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/completion.py +130 -0
  7. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/configuration.py +280 -0
  8. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/download.py +147 -0
  9. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/help.py +41 -0
  10. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/install.py +774 -0
  11. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/list.py +368 -0
  12. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/search.py +174 -0
  13. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/show.py +189 -0
  14. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/uninstall.py +113 -0
  15. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/wheel.py +183 -0
  16. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/__init__.py +21 -0
  17. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/base.py +51 -0
  18. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/installed.py +29 -0
  19. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/sdist.py +156 -0
  20. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/wheel.py +40 -0
  21. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/__init__.py +2 -0
  22. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/collector.py +507 -0
  23. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/package_finder.py +1027 -0
  24. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/sources.py +285 -0
  25. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-38.pyc +0 -0
  26. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-38.pyc +0 -0
  27. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__init__.py +2 -0
  28. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/candidate.py +30 -0
  29. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/direct_url.py +235 -0
  30. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/format_control.py +78 -0
  31. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/link.py +579 -0
  32. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/scheme.py +31 -0
  33. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/search_scope.py +132 -0
  34. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/selection_prefs.py +51 -0
  35. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/target_python.py +122 -0
  36. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/wheel.py +92 -0
  37. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/auth.py +561 -0
  38. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/cache.py +106 -0
  39. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/__init__.py +0 -0
  40. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/check.py +187 -0
  41. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/freeze.py +255 -0
  42. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/prepare.py +730 -0
  43. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/req/req_file.py +554 -0
  44. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/req/req_install.py +923 -0
  45. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/req/req_set.py +119 -0
  46. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-38.pyc +0 -0
  47. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-38.pyc +0 -0
  48. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-38.pyc +0 -0
  49. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-38.pyc +0 -0
  50. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-38.pyc +0 -0
.gitattributes CHANGED
@@ -358,3 +358,4 @@ my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Pillow.libs
358
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/opencv_python.libs/libavformat-3ff1be5b.so.59.27.100 filter=lfs diff=lfs merge=lfs -text
359
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_vendor/distlib/w64.exe filter=lfs diff=lfs merge=lfs -text
360
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/opencv_python.libs/libQt5XcbQpa-5b2d853e.so.5.15.0 filter=lfs diff=lfs merge=lfs -text
 
 
358
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/opencv_python.libs/libavformat-3ff1be5b.so.59.27.100 filter=lfs diff=lfs merge=lfs -text
359
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_vendor/distlib/w64.exe filter=lfs diff=lfs merge=lfs -text
360
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/opencv_python.libs/libQt5XcbQpa-5b2d853e.so.5.15.0 filter=lfs diff=lfs merge=lfs -text
361
+ my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_vendor/distlib/t64-arm.exe filter=lfs diff=lfs merge=lfs -text
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/__pycache__/configuration.cpython-38.pyc ADDED
Binary file (11.7 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/cli/spinners.py ADDED
@@ -0,0 +1,159 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+ import itertools
3
+ import logging
4
+ import sys
5
+ import time
6
+ from typing import IO, Generator, Optional
7
+
8
+ from pip._internal.utils.compat import WINDOWS
9
+ from pip._internal.utils.logging import get_indentation
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
class SpinnerInterface:
    """Abstract interface for progress spinners.

    Implementations provide ``spin`` (advance/refresh the progress
    indication) and ``finish`` (stop and report a final status string).
    """

    def spin(self) -> None:
        raise NotImplementedError()

    def finish(self, final_status: str) -> None:
        raise NotImplementedError()
20
+
21
+
22
class InteractiveSpinner(SpinnerInterface):
    """Spinner for interactive (tty) output.

    Writes ``<indent><message> ... `` to *file* (default ``sys.stdout``) and
    then animates a character cycle in place, erasing the previous status
    with backspaces.  Redraws are throttled through a ``RateLimiter``.
    """

    def __init__(
        self,
        message: str,
        file: Optional[IO[str]] = None,
        spin_chars: str = "-\\|/",
        # Empirically, 8 updates/second looks nice
        min_update_interval_seconds: float = 0.125,
    ):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        # Endless iterator over the animation frames.
        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        # Width of the status text currently on screen (what _write erases).
        self._width = 0

    def _write(self, status: str) -> None:
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self) -> None:
        """Draw the next animation frame (rate-limited; no-op once finished)."""
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status: str) -> None:
        """Replace the spinner with *final_status*, end the line, and stop."""
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
69
+
70
+
71
# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(SpinnerInterface):
    """Spinner that emits periodic status lines via the logging system."""

    def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
        self._finished = False
        self._message = message
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status: str) -> None:
        # Restart the keep-alive interval every time we log a status line.
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self) -> None:
        """Emit a keep-alive message when the rate limit allows it."""
        if not self._finished and self._rate_limiter.ready():
            self._update("still running...")

    def finish(self, final_status: str) -> None:
        """Log the final status once and mark the spinner as done."""
        if not self._finished:
            self._update(f"finished with status '{final_status}'")
            self._finished = True
99
+
100
+
101
class RateLimiter:
    """Tracks whether a minimum wall-clock interval has elapsed since the
    most recently recorded update (``time.time()`` based)."""

    def __init__(self, min_update_interval_seconds: float) -> None:
        self._min_update_interval_seconds = min_update_interval_seconds
        # 0 means "never updated", so the first ready() check passes.
        self._last_update: float = 0

    def ready(self) -> bool:
        """Return True when the configured interval has elapsed."""
        return time.time() - self._last_update >= self._min_update_interval_seconds

    def reset(self) -> None:
        """Record the current time as the most recent update."""
        self._last_update = time.time()
113
+
114
+
115
@contextlib.contextmanager
def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
    """Yield a spinner appropriate for the current output environment.

    Uses InteractiveSpinner on a tty at INFO-or-lower log level, otherwise
    NonInteractiveSpinner.  On exit the spinner is finished with "done",
    "canceled" (KeyboardInterrupt) or "error" (any other exception); the
    exception is re-raised after the status is reported.
    """
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner: SpinnerInterface = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
137
+
138
+
139
# ANSI escape sequences that hide/show the terminal cursor.
HIDE_CURSOR = "\x1b[?25l"
SHOW_CURSOR = "\x1b[?25h"


@contextlib.contextmanager
def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
    """Hide the terminal cursor on *file* for the duration of the block.

    No-op on Windows, on non-tty files, and when the effective log level is
    quieter than INFO.  The cursor is always restored via ``finally``.
    """
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    # We don't want to clutter the output with control characters if we're
    # writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            file.write(SHOW_CURSOR)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/cache.py ADDED
@@ -0,0 +1,225 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import textwrap
3
+ from optparse import Values
4
+ from typing import Any, List
5
+
6
+ from pip._internal.cli.base_command import Command
7
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
8
+ from pip._internal.exceptions import CommandError, PipError
9
+ from pip._internal.utils import filesystem
10
+ from pip._internal.utils.logging import getLogger
11
+
12
+ logger = getLogger(__name__)
13
+
14
+
15
class CacheCommand(Command):
    """
    Inspect and manage pip's wheel cache.

    Subcommands:

    - dir: Show the cache directory.
    - info: Show information about the cache.
    - list: List filenames of packages stored in the cache.
    - remove: Remove one or more package from the cache.
    - purge: Remove all items from the cache.

    ``<pattern>`` can be a glob expression or a package name.
    """

    ignore_require_venv = True
    usage = """
        %prog dir
        %prog info
        %prog list [<pattern>] [--format=[human, abspath]]
        %prog remove <pattern>
        %prog purge
    """

    def add_options(self) -> None:
        """Register the ``--format`` option for the ``list`` subcommand."""
        self.cmd_opts.add_option(
            "--format",
            action="store",
            dest="list_format",
            default="human",
            choices=("human", "abspath"),
            help="Select the output format among: human (default) or abspath",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Dispatch ``args[0]`` to the matching subcommand handler.

        Returns ERROR when the cache is disabled, the action is unknown, or
        a handler raises PipError; SUCCESS otherwise.
        """
        handlers = {
            "dir": self.get_cache_dir,
            "info": self.get_cache_info,
            "list": self.list_cache_items,
            "remove": self.remove_cache_items,
            "purge": self.purge_cache,
        }

        if not options.cache_dir:
            logger.error("pip cache commands can not function since cache is disabled.")
            return ERROR

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def get_cache_dir(self, options: Values, args: List[Any]) -> None:
        """Print the configured cache directory."""
        if args:
            raise CommandError("Too many arguments")

        logger.info(options.cache_dir)

    def get_cache_info(self, options: Values, args: List[Any]) -> None:
        """Print locations, sizes, and item counts for the HTTP and wheel caches."""
        if args:
            raise CommandError("Too many arguments")

        num_http_files = len(self._find_http_files(options))
        num_packages = len(self._find_wheels(options, "*"))

        # Both the pre-23.3 "http" dir and the current "http-v2" dir are
        # reported, and their sizes are summed.
        http_cache_location = self._cache_dir(options, "http-v2")
        old_http_cache_location = self._cache_dir(options, "http")
        wheels_cache_location = self._cache_dir(options, "wheels")
        http_cache_size = filesystem.format_size(
            filesystem.directory_size(http_cache_location)
            + filesystem.directory_size(old_http_cache_location)
        )
        wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)

        message = (
            textwrap.dedent(
                """
                Package index page cache location (pip v23.3+): {http_cache_location}
                Package index page cache location (older pips): {old_http_cache_location}
                Package index page cache size: {http_cache_size}
                Number of HTTP files: {num_http_files}
                Locally built wheels location: {wheels_cache_location}
                Locally built wheels size: {wheels_cache_size}
                Number of locally built wheels: {package_count}
                """  # noqa: E501
            )
            .format(
                http_cache_location=http_cache_location,
                old_http_cache_location=old_http_cache_location,
                http_cache_size=http_cache_size,
                num_http_files=num_http_files,
                wheels_cache_location=wheels_cache_location,
                package_count=num_packages,
                wheels_cache_size=wheels_cache_size,
            )
            .strip()
        )

        logger.info(message)

    def list_cache_items(self, options: Values, args: List[Any]) -> None:
        """List cached wheels matching ``args[0]`` (default: all) in the
        format selected by ``--format``."""
        if len(args) > 1:
            raise CommandError("Too many arguments")

        if args:
            pattern = args[0]
        else:
            pattern = "*"

        files = self._find_wheels(options, pattern)
        if options.list_format == "human":
            self.format_for_human(files)
        else:
            self.format_for_abspath(files)

    def format_for_human(self, files: List[str]) -> None:
        """Log a sorted, human-readable listing of wheel names with sizes."""
        if not files:
            logger.info("No locally built wheels cached.")
            return

        results = []
        for filename in files:
            wheel = os.path.basename(filename)
            size = filesystem.format_file_size(filename)
            results.append(f" - {wheel} ({size})")
        logger.info("Cache contents:\n")
        logger.info("\n".join(sorted(results)))

    def format_for_abspath(self, files: List[str]) -> None:
        """Log one absolute path per line (nothing when empty)."""
        if files:
            logger.info("\n".join(sorted(files)))

    def remove_cache_items(self, options: Values, args: List[Any]) -> None:
        """Delete cached wheels matching ``args[0]``; with ``*`` also delete
        HTTP cache files."""
        if len(args) > 1:
            raise CommandError("Too many arguments")

        if not args:
            raise CommandError("Please provide a pattern")

        files = self._find_wheels(options, args[0])

        no_matching_msg = "No matching packages"
        if args[0] == "*":
            # Only fetch http files if no specific pattern given
            files += self._find_http_files(options)
        else:
            # Add the pattern to the log message
            no_matching_msg += f' for pattern "{args[0]}"'

        if not files:
            logger.warning(no_matching_msg)

        for filename in files:
            os.unlink(filename)
            logger.verbose("Removed %s", filename)
        logger.info("Files removed: %s", len(files))

    def purge_cache(self, options: Values, args: List[Any]) -> None:
        """Remove everything: delegates to remove_cache_items with ``*``."""
        if args:
            raise CommandError("Too many arguments")

        return self.remove_cache_items(options, ["*"])

    def _cache_dir(self, options: Values, subdir: str) -> str:
        """Return ``<cache_dir>/<subdir>``."""
        return os.path.join(options.cache_dir, subdir)

    def _find_http_files(self, options: Values) -> List[str]:
        """Return all files under both the old ("http") and new ("http-v2")
        HTTP cache directories."""
        old_http_dir = self._cache_dir(options, "http")
        new_http_dir = self._cache_dir(options, "http-v2")
        return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(
            new_http_dir, "*"
        )

    def _find_wheels(self, options: Values, pattern: str) -> List[str]:
        """Return cached wheel files whose names match *pattern*."""
        wheel_dir = self._cache_dir(options, "wheels")

        # The wheel filename format, as specified in PEP 427, is:
        #     {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
        #
        # Additionally, non-alphanumeric values in the distribution are
        # normalized to underscores (_), meaning hyphens can never occur
        # before `-{version}`.
        #
        # Given that information:
        # - If the pattern we're given contains a hyphen (-), the user is
        #   providing at least the version. Thus, we can just append `*.whl`
        #   to match the rest of it.
        # - If the pattern we're given doesn't contain a hyphen (-), the
        #   user is only providing the name. Thus, we append `-*.whl` to
        #   match the hyphen before the version, followed by anything else.
        #
        # PEP 427: https://www.python.org/dev/peps/pep-0427/
        pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")

        return filesystem.find_files(wheel_dir, pattern)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/check.py ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from optparse import Values
3
+ from typing import List
4
+
5
+ from pip._internal.cli.base_command import Command
6
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
7
+ from pip._internal.operations.check import (
8
+ check_package_set,
9
+ create_package_set_from_installed,
10
+ warn_legacy_versions_and_specifiers,
11
+ )
12
+ from pip._internal.utils.misc import write_output
13
+
14
+ logger = logging.getLogger(__name__)
15
+
16
+
17
class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""

    usage = """
      %prog [options]"""

    def run(self, options: Values, args: List[str]) -> int:
        """Report missing and conflicting dependencies among installed packages.

        Returns ERROR if anything is missing, conflicting, or unparseable;
        SUCCESS otherwise.
        """
        package_set, parsing_probs = create_package_set_from_installed()
        warn_legacy_versions_and_specifiers(package_set)
        missing, conflicting = check_package_set(package_set)

        # Report each dependency that is required but not installed.
        for project_name in missing:
            version = package_set[project_name].version
            for dependency in missing[project_name]:
                write_output(
                    "%s %s requires %s, which is not installed.",
                    project_name,
                    version,
                    dependency[0],
                )

        # Report each installed dependency that violates a requirement.
        for project_name in conflicting:
            version = package_set[project_name].version
            for dep_name, dep_version, req in conflicting[project_name]:
                write_output(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name,
                    version,
                    req,
                    dep_name,
                    dep_version,
                )

        # Parsing problems also count as failure, even without conflicts.
        if missing or conflicting or parsing_probs:
            return ERROR
        else:
            write_output("No broken requirements found.")
            return SUCCESS
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/completion.py ADDED
@@ -0,0 +1,130 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ import textwrap
3
+ from optparse import Values
4
+ from typing import List
5
+
6
+ from pip._internal.cli.base_command import Command
7
+ from pip._internal.cli.status_codes import SUCCESS
8
+ from pip._internal.utils.misc import get_prog
9
+
10
# Wrapper template: the generated script is framed by start/end marker
# comments so users can locate/remove it in their shell rc files.
BASE_COMPLETION = """
# pip {shell} completion start{script}# pip {shell} completion end
"""

# Per-shell completion script templates.  `{prog}` is substituted with the
# pip executable name; doubled braces ({{ }}) are literal braces after
# str.format.  Each script invokes pip with PIP_AUTO_COMPLETE=1 and the
# COMP_WORDS/COMP_CWORD environment variables to obtain completions.
COMPLETION_SCRIPTS = {
    "bash": """
        _pip_completion()
        {{
            COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
                           COMP_CWORD=$COMP_CWORD \\
                           PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
        }}
        complete -o default -F _pip_completion {prog}
    """,
    "zsh": """
        #compdef -P pip[0-9.]#
        __pip() {{
          compadd $( COMP_WORDS="$words[*]" \\
                     COMP_CWORD=$((CURRENT-1)) \\
                     PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
        }}
        if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
          # autoload from fpath, call function directly
          __pip "$@"
        else
          # eval/source/. command, register function for later
          compdef __pip -P 'pip[0-9.]#'
        fi
    """,
    "fish": """
        function __fish_complete_pip
            set -lx COMP_WORDS (commandline -o) ""
            set -lx COMP_CWORD ( \\
                math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
            )
            set -lx PIP_AUTO_COMPLETE 1
            string split \\ -- (eval $COMP_WORDS[1])
        end
        complete -fa "(__fish_complete_pip)" -c {prog}
    """,
    "powershell": """
        if ((Test-Path Function:\\TabExpansion) -and -not `
            (Test-Path Function:\\_pip_completeBackup)) {{
            Rename-Item Function:\\TabExpansion _pip_completeBackup
        }}
        function TabExpansion($line, $lastWord) {{
            $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
            if ($lastBlock.StartsWith("{prog} ")) {{
                $Env:COMP_WORDS=$lastBlock
                $Env:COMP_CWORD=$lastBlock.Split().Length - 1
                $Env:PIP_AUTO_COMPLETE=1
                (& {prog}).Split()
                Remove-Item Env:COMP_WORDS
                Remove-Item Env:COMP_CWORD
                Remove-Item Env:PIP_AUTO_COMPLETE
            }}
            elseif (Test-Path Function:\\_pip_completeBackup) {{
                # Fall back on existing tab expansion
                _pip_completeBackup $line $lastWord
            }}
        }}
    """,
}
73
+
74
+
75
class CompletionCommand(Command):
    """A helper command to be used for command completion."""

    ignore_require_venv = True

    def add_options(self) -> None:
        """Register one mutually-overwriting flag per supported shell; each
        stores its shell name into ``options.shell``."""
        self.cmd_opts.add_option(
            "--bash",
            "-b",
            action="store_const",
            const="bash",
            dest="shell",
            help="Emit completion code for bash",
        )
        self.cmd_opts.add_option(
            "--zsh",
            "-z",
            action="store_const",
            const="zsh",
            dest="shell",
            help="Emit completion code for zsh",
        )
        self.cmd_opts.add_option(
            "--fish",
            "-f",
            action="store_const",
            const="fish",
            dest="shell",
            help="Emit completion code for fish",
        )
        self.cmd_opts.add_option(
            "--powershell",
            "-p",
            action="store_const",
            const="powershell",
            dest="shell",
            help="Emit completion code for powershell",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ["--" + shell for shell in sorted(shells)]
        if options.shell in shells:
            # Dedent the template, fill in the program name, and wrap it in
            # the start/end marker comments.
            script = textwrap.dedent(
                COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
            )
            print(BASE_COMPLETION.format(script=script, shell=options.shell))
            return SUCCESS
        else:
            # NOTE(review): returns SUCCESS even on the error path — this
            # mirrors the visible code; confirm against the CLI contract
            # before changing.
            sys.stderr.write(
                "ERROR: You must pass {}\n".format(" or ".join(shell_options))
            )
            return SUCCESS
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/configuration.py ADDED
@@ -0,0 +1,280 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ import subprocess
4
+ from optparse import Values
5
+ from typing import Any, List, Optional
6
+
7
+ from pip._internal.cli.base_command import Command
8
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
9
+ from pip._internal.configuration import (
10
+ Configuration,
11
+ Kind,
12
+ get_configuration_files,
13
+ kinds,
14
+ )
15
+ from pip._internal.exceptions import PipError
16
+ from pip._internal.utils.logging import indent_log
17
+ from pip._internal.utils.misc import get_prog, write_output
18
+
19
+ logger = logging.getLogger(__name__)
20
+
21
+
22
+ class ConfigurationCommand(Command):
23
+ """
24
+ Manage local and global configuration.
25
+
26
+ Subcommands:
27
+
28
+ - list: List the active configuration (or from the file specified)
29
+ - edit: Edit the configuration file in an editor
30
+ - get: Get the value associated with command.option
31
+ - set: Set the command.option=value
32
+ - unset: Unset the value associated with command.option
33
+ - debug: List the configuration files and values defined under them
34
+
35
+ Configuration keys should be dot separated command and option name,
36
+ with the special prefix "global" affecting any command. For example,
37
+ "pip config set global.index-url https://example.org/" would configure
38
+ the index url for all commands, but "pip config set download.timeout 10"
39
+ would configure a 10 second timeout only for "pip download" commands.
40
+
41
+ If none of --user, --global and --site are passed, a virtual
42
+ environment configuration file is used if one is active and the file
43
+ exists. Otherwise, all modifications happen to the user file by
44
+ default.
45
+ """
46
+
47
+ ignore_require_venv = True
48
+ usage = """
49
+ %prog [<file-option>] list
50
+ %prog [<file-option>] [--editor <editor-path>] edit
51
+
52
+ %prog [<file-option>] get command.option
53
+ %prog [<file-option>] set command.option value
54
+ %prog [<file-option>] unset command.option
55
+ %prog [<file-option>] debug
56
+ """
57
+
58
+ def add_options(self) -> None:
59
+ self.cmd_opts.add_option(
60
+ "--editor",
61
+ dest="editor",
62
+ action="store",
63
+ default=None,
64
+ help=(
65
+ "Editor to use to edit the file. Uses VISUAL or EDITOR "
66
+ "environment variables if not provided."
67
+ ),
68
+ )
69
+
70
+ self.cmd_opts.add_option(
71
+ "--global",
72
+ dest="global_file",
73
+ action="store_true",
74
+ default=False,
75
+ help="Use the system-wide configuration file only",
76
+ )
77
+
78
+ self.cmd_opts.add_option(
79
+ "--user",
80
+ dest="user_file",
81
+ action="store_true",
82
+ default=False,
83
+ help="Use the user configuration file only",
84
+ )
85
+
86
+ self.cmd_opts.add_option(
87
+ "--site",
88
+ dest="site_file",
89
+ action="store_true",
90
+ default=False,
91
+ help="Use the current environment configuration file only",
92
+ )
93
+
94
+ self.parser.insert_option_group(0, self.cmd_opts)
95
+
96
    def run(self, options: Values, args: List[str]) -> int:
        """Dispatch ``pip config <action> ...`` to the matching handler.

        Returns SUCCESS or ERROR; user-facing failures (bad action, bad
        file selection, handler errors) are logged instead of raised.
        """
        handlers = {
            "list": self.list_values,
            "edit": self.open_in_editor,
            "get": self.get_name,
            "set": self.set_name_value,
            "unset": self.unset_name,
            "debug": self.list_config_values,
        }

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Determine which configuration files are to be loaded
        # Depends on whether the command is modifying (get/set/unset/edit
        # need exactly one target file; list/debug can read all of them).
        try:
            load_only = self._determine_file(
                options, need_value=(action in ["get", "set", "unset", "edit"])
            )
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        # Load a new configuration
        self.configuration = Configuration(
            isolated=options.isolated_mode, load_only=load_only
        )
        self.configuration.load()

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS
140
+
141
+ def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
142
+ file_options = [
143
+ key
144
+ for key, value in (
145
+ (kinds.USER, options.user_file),
146
+ (kinds.GLOBAL, options.global_file),
147
+ (kinds.SITE, options.site_file),
148
+ )
149
+ if value
150
+ ]
151
+
152
+ if not file_options:
153
+ if not need_value:
154
+ return None
155
+ # Default to user, unless there's a site file.
156
+ elif any(
157
+ os.path.exists(site_config_file)
158
+ for site_config_file in get_configuration_files()[kinds.SITE]
159
+ ):
160
+ return kinds.SITE
161
+ else:
162
+ return kinds.USER
163
+ elif len(file_options) == 1:
164
+ return file_options[0]
165
+
166
+ raise PipError(
167
+ "Need exactly one file to operate upon "
168
+ "(--user, --site, --global) to perform."
169
+ )
170
+
171
+ def list_values(self, options: Values, args: List[str]) -> None:
172
+ self._get_n_args(args, "list", n=0)
173
+
174
+ for key, value in sorted(self.configuration.items()):
175
+ write_output("%s=%r", key, value)
176
+
177
+ def get_name(self, options: Values, args: List[str]) -> None:
178
+ key = self._get_n_args(args, "get [name]", n=1)
179
+ value = self.configuration.get_value(key)
180
+
181
+ write_output("%s", value)
182
+
183
+ def set_name_value(self, options: Values, args: List[str]) -> None:
184
+ key, value = self._get_n_args(args, "set [name] [value]", n=2)
185
+ self.configuration.set_value(key, value)
186
+
187
+ self._save_configuration()
188
+
189
+ def unset_name(self, options: Values, args: List[str]) -> None:
190
+ key = self._get_n_args(args, "unset [name]", n=1)
191
+ self.configuration.unset_value(key)
192
+
193
+ self._save_configuration()
194
+
195
+ def list_config_values(self, options: Values, args: List[str]) -> None:
196
+ """List config key-value pairs across different config files"""
197
+ self._get_n_args(args, "debug", n=0)
198
+
199
+ self.print_env_var_values()
200
+ # Iterate over config files and print if they exist, and the
201
+ # key-value pairs present in them if they do
202
+ for variant, files in sorted(self.configuration.iter_config_files()):
203
+ write_output("%s:", variant)
204
+ for fname in files:
205
+ with indent_log():
206
+ file_exists = os.path.exists(fname)
207
+ write_output("%s, exists: %r", fname, file_exists)
208
+ if file_exists:
209
+ self.print_config_file_values(variant)
210
+
211
+ def print_config_file_values(self, variant: Kind) -> None:
212
+ """Get key-value pairs from the file of a variant"""
213
+ for name, value in self.configuration.get_values_in_config(variant).items():
214
+ with indent_log():
215
+ write_output("%s: %s", name, value)
216
+
217
+ def print_env_var_values(self) -> None:
218
+ """Get key-values pairs present as environment variables"""
219
+ write_output("%s:", "env_var")
220
+ with indent_log():
221
+ for key, value in sorted(self.configuration.get_environ_vars()):
222
+ env_var = f"PIP_{key.upper()}"
223
+ write_output("%s=%r", env_var, value)
224
+
225
    def open_in_editor(self, options: Values, args: List[str]) -> None:
        """Launch the user's editor on the configuration file to edit.

        Raises PipError when no suitable file or editor can be found, or
        when the editor exits with a non-zero status.
        """
        editor = self._determine_editor(options)

        fname = self.configuration.get_file_to_edit()
        if fname is None:
            raise PipError("Could not determine appropriate file.")
        elif '"' in fname:
            # This shouldn't happen, unless we see a username like that.
            # If that happens, we'd appreciate a pull request fixing this.
            raise PipError(
                f'Can not open an editor for a file name containing "\n{fname}'
            )

        try:
            # shell=True — presumably so $VISUAL/$EDITOR values that carry
            # arguments (e.g. "code --wait") keep working; note the command
            # string is built from the (quote-checked) file name above.
            subprocess.check_call(f'{editor} "{fname}"', shell=True)
        except FileNotFoundError as e:
            # Attribute the failure to the missing editor binary rather
            # than leaving the filename blank, then re-raise unchanged.
            if not e.filename:
                e.filename = editor
            raise
        except subprocess.CalledProcessError as e:
            raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")
246
+
247
+ def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
248
+ """Helper to make sure the command got the right number of arguments"""
249
+ if len(args) != n:
250
+ msg = (
251
+ f"Got unexpected number of arguments, expected {n}. "
252
+ f'(example: "{get_prog()} config {example}")'
253
+ )
254
+ raise PipError(msg)
255
+
256
+ if n == 1:
257
+ return args[0]
258
+ else:
259
+ return args
260
+
261
+ def _save_configuration(self) -> None:
262
+ # We successfully ran a modifying command. Need to save the
263
+ # configuration.
264
+ try:
265
+ self.configuration.save()
266
+ except Exception:
267
+ logger.exception(
268
+ "Unable to save configuration. Please report this as a bug."
269
+ )
270
+ raise PipError("Internal Error.")
271
+
272
+ def _determine_editor(self, options: Values) -> str:
273
+ if options.editor is not None:
274
+ return options.editor
275
+ elif "VISUAL" in os.environ:
276
+ return os.environ["VISUAL"]
277
+ elif "EDITOR" in os.environ:
278
+ return os.environ["EDITOR"]
279
+ else:
280
+ raise PipError("Could not determine editor to use.")
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/download.py ADDED
@@ -0,0 +1,147 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ from optparse import Values
4
+ from typing import List
5
+
6
+ from pip._internal.cli import cmdoptions
7
+ from pip._internal.cli.cmdoptions import make_target_python
8
+ from pip._internal.cli.req_command import RequirementCommand, with_cleanup
9
+ from pip._internal.cli.status_codes import SUCCESS
10
+ from pip._internal.operations.build.build_tracker import get_build_tracker
11
+ from pip._internal.req.req_install import check_legacy_setup_py_options
12
+ from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
13
+ from pip._internal.utils.temp_dir import TempDirectory
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def add_options(self) -> None:
        """Register downloader options (mostly shared with ``pip install``)."""
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.global_options())
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.src())
        self.cmd_opts.add_option(cmdoptions.pre())
        self.cmd_opts.add_option(cmdoptions.require_hashes())
        self.cmd_opts.add_option(cmdoptions.progress_bar())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.check_build_deps())
        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())

        # Where the distribution files land; defaults to the CWD.
        self.cmd_opts.add_option(
            "-d",
            "--dest",
            "--destination-dir",
            "--destination-directory",
            dest="download_dir",
            metavar="dir",
            default=os.curdir,
            help="Download packages into <dir>.",
        )

        cmdoptions.add_target_python_options(self.cmd_opts)

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:
        """Resolve the requirements and save their files to download_dir.

        Returns SUCCESS; resolution/preparation errors propagate as
        exceptions handled by the base command machinery.
        """
        # Downloads always re-fetch, regardless of what is installed.
        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        cmdoptions.check_dist_restriction(options)

        options.download_dir = normalize_path(options.download_dir)
        ensure_dir(options.download_dir)

        session = self.get_default_session(options)

        target_python = make_target_python(options)
        finder = self._build_package_finder(
            options=options,
            session=session,
            target_python=target_python,
            ignore_requires_python=options.ignore_requires_python,
        )

        build_tracker = self.enter_context(get_build_tracker())

        # Scratch build area, cleaned up globally unless --no-clean.
        directory = TempDirectory(
            delete=not options.no_clean,
            kind="download",
            globally_managed=True,
        )

        reqs = self.get_requirements(args, options, finder, session)
        check_legacy_setup_py_options(options, reqs)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            build_tracker=build_tracker,
            session=session,
            finder=finder,
            download_dir=options.download_dir,
            use_user_site=False,
            verbosity=self.verbosity,
        )

        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            ignore_requires_python=options.ignore_requires_python,
            use_pep517=options.use_pep517,
            py_version_info=options.python_version,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

        # Save every resolved requirement that is not already satisfied
        # locally, collecting names for the final summary line.
        downloaded: List[str] = []
        for req in requirement_set.requirements.values():
            if req.satisfied_by is None:
                assert req.name is not None
                preparer.save_linked_requirement(req)
                downloaded.append(req.name)

        preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
        requirement_set.warn_legacy_versions_and_specifiers()

        if downloaded:
            write_output("Successfully downloaded %s", " ".join(downloaded))

        return SUCCESS
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/help.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from optparse import Values
2
+ from typing import List
3
+
4
+ from pip._internal.cli.base_command import Command
5
+ from pip._internal.cli.status_codes import SUCCESS
6
+ from pip._internal.exceptions import CommandError
7
+
8
+
9
class HelpCommand(Command):
    """Show help for commands"""

    usage = """
      %prog <command>"""
    ignore_require_venv = True

    def run(self, options: Values, args: List[str]) -> int:
        """Print the help text for the named sub-command."""
        # Imported lazily to avoid a circular import with pip's command
        # registry at module load time.
        from pip._internal.commands import (
            commands_dict,
            create_command,
            get_similar_commands,
        )

        # 'pip help' with no args is handled by pip.__init__.parseopt()
        if not args:
            return SUCCESS
        cmd_name = args[0]  # the command we need help for

        if cmd_name not in commands_dict:
            # Unknown command: suggest the closest match, if any.
            guess = get_similar_commands(cmd_name)
            parts = [f'unknown command "{cmd_name}"']
            if guess:
                parts.append(f'maybe you meant "{guess}"')
            raise CommandError(" - ".join(parts))

        create_command(cmd_name).parser.print_help()

        return SUCCESS
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/install.py ADDED
@@ -0,0 +1,774 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import errno
2
+ import json
3
+ import operator
4
+ import os
5
+ import shutil
6
+ import site
7
+ from optparse import SUPPRESS_HELP, Values
8
+ from typing import List, Optional
9
+
10
+ from pip._vendor.rich import print_json
11
+
12
+ from pip._internal.cache import WheelCache
13
+ from pip._internal.cli import cmdoptions
14
+ from pip._internal.cli.cmdoptions import make_target_python
15
+ from pip._internal.cli.req_command import (
16
+ RequirementCommand,
17
+ warn_if_run_as_root,
18
+ with_cleanup,
19
+ )
20
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
21
+ from pip._internal.exceptions import CommandError, InstallationError
22
+ from pip._internal.locations import get_scheme
23
+ from pip._internal.metadata import get_environment
24
+ from pip._internal.models.installation_report import InstallationReport
25
+ from pip._internal.operations.build.build_tracker import get_build_tracker
26
+ from pip._internal.operations.check import ConflictDetails, check_install_conflicts
27
+ from pip._internal.req import install_given_reqs
28
+ from pip._internal.req.req_install import (
29
+ InstallRequirement,
30
+ check_legacy_setup_py_options,
31
+ )
32
+ from pip._internal.utils.compat import WINDOWS
33
+ from pip._internal.utils.filesystem import test_writable_dir
34
+ from pip._internal.utils.logging import getLogger
35
+ from pip._internal.utils.misc import (
36
+ check_externally_managed,
37
+ ensure_dir,
38
+ get_pip_version,
39
+ protect_pip_from_modification_on_windows,
40
+ write_output,
41
+ )
42
+ from pip._internal.utils.temp_dir import TempDirectory
43
+ from pip._internal.utils.virtualenv import (
44
+ running_under_virtualenv,
45
+ virtualenv_no_global,
46
+ )
47
+ from pip._internal.wheel_builder import build, should_build_for_install_command
48
+
49
+ logger = getLogger(__name__)
50
+
51
+
52
+ class InstallCommand(RequirementCommand):
53
+ """
54
+ Install packages from:
55
+
56
+ - PyPI (and other indexes) using requirement specifiers.
57
+ - VCS project urls.
58
+ - Local project directories.
59
+ - Local or remote source archives.
60
+
61
+ pip also supports installing from "requirements files", which provide
62
+ an easy way to specify a whole environment to be installed.
63
+ """
64
+
65
+ usage = """
66
+ %prog [options] <requirement specifier> [package-index-options] ...
67
+ %prog [options] -r <requirements file> [package-index-options] ...
68
+ %prog [options] [-e] <vcs project url> ...
69
+ %prog [options] [-e] <local project path> ...
70
+ %prog [options] <archive url/path> ..."""
71
+
72
    def add_options(self) -> None:
        """Register the (many) options accepted by ``pip install``."""
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.pre())

        self.cmd_opts.add_option(cmdoptions.editable())
        self.cmd_opts.add_option(
            "--dry-run",
            action="store_true",
            dest="dry_run",
            default=False,
            help=(
                "Don't actually install anything, just print what would be. "
                "Can be used in combination with --ignore-installed "
                "to 'resolve' the requirements."
            ),
        )
        self.cmd_opts.add_option(
            "-t",
            "--target",
            dest="target_dir",
            metavar="dir",
            default=None,
            help=(
                "Install packages into <dir>. "
                "By default this will not replace existing files/folders in "
                "<dir>. Use --upgrade to replace existing packages in <dir> "
                "with new versions."
            ),
        )
        cmdoptions.add_target_python_options(self.cmd_opts)

        # Installation-destination selectors (--user/--root/--prefix);
        # their interaction is resolved in run() via decide_user_install().
        self.cmd_opts.add_option(
            "--user",
            dest="use_user_site",
            action="store_true",
            help=(
                "Install to the Python user install directory for your "
                "platform. Typically ~/.local/, or %APPDATA%\\Python on "
                "Windows. (See the Python documentation for site.USER_BASE "
                "for full details.)"
            ),
        )
        self.cmd_opts.add_option(
            "--no-user",
            dest="use_user_site",
            action="store_false",
            help=SUPPRESS_HELP,
        )
        self.cmd_opts.add_option(
            "--root",
            dest="root_path",
            metavar="dir",
            default=None,
            help="Install everything relative to this alternate root directory.",
        )
        self.cmd_opts.add_option(
            "--prefix",
            dest="prefix_path",
            metavar="dir",
            default=None,
            help=(
                "Installation prefix where lib, bin and other top-level "
                "folders are placed. Note that the resulting installation may "
                "contain scripts and other resources which reference the "
                "Python interpreter of pip, and not that of ``--prefix``. "
                "See also the ``--python`` option if the intention is to "
                "install packages into another (possibly pip-free) "
                "environment."
            ),
        )

        self.cmd_opts.add_option(cmdoptions.src())

        # Upgrade behaviour.
        self.cmd_opts.add_option(
            "-U",
            "--upgrade",
            dest="upgrade",
            action="store_true",
            help=(
                "Upgrade all specified packages to the newest available "
                "version. The handling of dependencies depends on the "
                "upgrade-strategy used."
            ),
        )

        self.cmd_opts.add_option(
            "--upgrade-strategy",
            dest="upgrade_strategy",
            default="only-if-needed",
            choices=["only-if-needed", "eager"],
            help=(
                "Determines how dependency upgrading should be handled "
                "[default: %default]. "
                '"eager" - dependencies are upgraded regardless of '
                "whether the currently installed version satisfies the "
                "requirements of the upgraded package(s). "
                '"only-if-needed" - are upgraded only when they do not '
                "satisfy the requirements of the upgraded package(s)."
            ),
        )

        self.cmd_opts.add_option(
            "--force-reinstall",
            dest="force_reinstall",
            action="store_true",
            help="Reinstall all packages even if they are already up-to-date.",
        )

        self.cmd_opts.add_option(
            "-I",
            "--ignore-installed",
            dest="ignore_installed",
            action="store_true",
            help=(
                "Ignore the installed packages, overwriting them. "
                "This can break your system if the existing package "
                "is of a different version or was installed "
                "with a different package manager!"
            ),
        )

        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.check_build_deps())
        self.cmd_opts.add_option(cmdoptions.override_externally_managed())

        self.cmd_opts.add_option(cmdoptions.config_settings())
        self.cmd_opts.add_option(cmdoptions.global_options())

        # --compile defaults True; --no-compile flips the same dest.
        self.cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile Python source files to bytecode",
        )

        self.cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile Python source files to bytecode",
        )

        self.cmd_opts.add_option(
            "--no-warn-script-location",
            action="store_false",
            dest="warn_script_location",
            default=True,
            help="Do not warn when installing scripts outside PATH",
        )
        self.cmd_opts.add_option(
            "--no-warn-conflicts",
            action="store_false",
            dest="warn_about_conflicts",
            default=True,
            help="Do not warn about broken dependencies",
        )
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.require_hashes())
        self.cmd_opts.add_option(cmdoptions.progress_bar())
        self.cmd_opts.add_option(cmdoptions.root_user_action())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

        self.cmd_opts.add_option(
            "--report",
            dest="json_report_file",
            metavar="file",
            default=None,
            help=(
                "Generate a JSON file describing what pip did to install "
                "the provided requirements. "
                "Can be used in combination with --dry-run and --ignore-installed "
                "to 'resolve' the requirements. "
                "When - is used as file name it writes to stdout. "
                "When writing to stdout, please combine with the --quiet option "
                "to avoid mixing pip logging output with JSON output."
            ),
        )
264
+
265
    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:
        """Resolve, build and install the requested requirements.

        Returns SUCCESS, or ERROR on OS-level installation failure.
        """
        if options.use_user_site and options.target_dir is not None:
            raise CommandError("Can not combine '--user' and '--target'")

        # Check whether the environment we're installing into is externally
        # managed, as specified in PEP 668. Specifying --root, --target, or
        # --prefix disables the check, since there's no reliable way to locate
        # the EXTERNALLY-MANAGED file for those cases. An exception is also
        # made specifically for "--dry-run --report" for convenience.
        installing_into_current_environment = (
            not (options.dry_run and options.json_report_file)
            and options.root_path is None
            and options.target_dir is None
            and options.prefix_path is None
        )
        if (
            installing_into_current_environment
            and not options.override_externally_managed
        ):
            check_externally_managed()

        upgrade_strategy = "to-satisfy-only"
        if options.upgrade:
            upgrade_strategy = options.upgrade_strategy

        cmdoptions.check_dist_restriction(options, check_target=True)

        logger.verbose("Using %s", get_pip_version())
        options.use_user_site = decide_user_install(
            options.use_user_site,
            prefix_path=options.prefix_path,
            target_dir=options.target_dir,
            root_path=options.root_path,
            isolated_mode=options.isolated_mode,
        )

        # --target installs go into a temporary directory first and are
        # moved into place by _handle_target_dir() at the end.
        target_temp_dir: Optional[TempDirectory] = None
        target_temp_dir_path: Optional[str] = None
        if options.target_dir:
            options.ignore_installed = True
            options.target_dir = os.path.abspath(options.target_dir)
            if (
                # fmt: off
                os.path.exists(options.target_dir) and
                not os.path.isdir(options.target_dir)
                # fmt: on
            ):
                raise CommandError(
                    "Target path exists but is not a directory, will not continue."
                )

            # Create a target directory for using with the target option
            target_temp_dir = TempDirectory(kind="target")
            target_temp_dir_path = target_temp_dir.path
            self.enter_context(target_temp_dir)

        global_options = options.global_options or []

        session = self.get_default_session(options)

        target_python = make_target_python(options)
        finder = self._build_package_finder(
            options=options,
            session=session,
            target_python=target_python,
            ignore_requires_python=options.ignore_requires_python,
        )
        build_tracker = self.enter_context(get_build_tracker())

        directory = TempDirectory(
            delete=not options.no_clean,
            kind="install",
            globally_managed=True,
        )

        try:
            reqs = self.get_requirements(args, options, finder, session)
            check_legacy_setup_py_options(options, reqs)

            wheel_cache = WheelCache(options.cache_dir)

            # Only when installing is it permitted to use PEP 660.
            # In other circumstances (pip wheel, pip download) we generate
            # regular (i.e. non editable) metadata and wheels.
            for req in reqs:
                req.permit_editable_wheels = True

            preparer = self.make_requirement_preparer(
                temp_build_dir=directory,
                options=options,
                build_tracker=build_tracker,
                session=session,
                finder=finder,
                use_user_site=options.use_user_site,
                verbosity=self.verbosity,
            )
            resolver = self.make_resolver(
                preparer=preparer,
                finder=finder,
                options=options,
                wheel_cache=wheel_cache,
                use_user_site=options.use_user_site,
                ignore_installed=options.ignore_installed,
                ignore_requires_python=options.ignore_requires_python,
                force_reinstall=options.force_reinstall,
                upgrade_strategy=upgrade_strategy,
                use_pep517=options.use_pep517,
            )

            self.trace_basic_info(finder)

            requirement_set = resolver.resolve(
                reqs, check_supported_wheels=not options.target_dir
            )

            # Emit the --report JSON (to stdout when "-" is given) before
            # any installation happens, so --dry-run can use it too.
            if options.json_report_file:
                report = InstallationReport(requirement_set.requirements_to_install)
                if options.json_report_file == "-":
                    print_json(data=report.to_dict())
                else:
                    with open(options.json_report_file, "w", encoding="utf-8") as f:
                        json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)

            if options.dry_run:
                # In non dry-run mode, the legacy versions and specifiers check
                # will be done as part of conflict detection.
                requirement_set.warn_legacy_versions_and_specifiers()
                would_install_items = sorted(
                    (r.metadata["name"], r.metadata["version"])
                    for r in requirement_set.requirements_to_install
                )
                if would_install_items:
                    write_output(
                        "Would install %s",
                        " ".join("-".join(item) for item in would_install_items),
                    )
                return SUCCESS

            try:
                pip_req = requirement_set.get_requirement("pip")
            except KeyError:
                modifying_pip = False
            else:
                # If we're not replacing an already installed pip,
                # we're not modifying it.
                modifying_pip = pip_req.satisfied_by is None
            protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)

            reqs_to_build = [
                r
                for r in requirement_set.requirements.values()
                if should_build_for_install_command(r)
            ]

            _, build_failures = build(
                reqs_to_build,
                wheel_cache=wheel_cache,
                verify=True,
                build_options=[],
                global_options=global_options,
            )

            if build_failures:
                raise InstallationError(
                    "Could not build wheels for {}, which is required to "
                    "install pyproject.toml-based projects".format(
                        ", ".join(r.name for r in build_failures)  # type: ignore
                    )
                )

            to_install = resolver.get_installation_order(requirement_set)

            # Check for conflicts in the package set we're installing.
            conflicts: Optional[ConflictDetails] = None
            should_warn_about_conflicts = (
                not options.ignore_dependencies and options.warn_about_conflicts
            )
            if should_warn_about_conflicts:
                conflicts = self._determine_conflicts(to_install)

            # Don't warn about script install locations if
            # --target or --prefix has been specified
            warn_script_location = options.warn_script_location
            if options.target_dir or options.prefix_path:
                warn_script_location = False

            installed = install_given_reqs(
                to_install,
                global_options,
                root=options.root_path,
                home=target_temp_dir_path,
                prefix=options.prefix_path,
                warn_script_location=warn_script_location,
                use_user_site=options.use_user_site,
                pycompile=options.compile,
            )

            lib_locations = get_lib_location_guesses(
                user=options.use_user_site,
                home=target_temp_dir_path,
                root=options.root_path,
                prefix=options.prefix_path,
                isolated=options.isolated_mode,
            )
            env = get_environment(lib_locations)

            # Build "name-version" labels for the success message; version
            # lookup is best-effort and silently falls back to bare names.
            installed.sort(key=operator.attrgetter("name"))
            items = []
            for result in installed:
                item = result.name
                try:
                    installed_dist = env.get_distribution(item)
                    if installed_dist is not None:
                        item = f"{item}-{installed_dist.version}"
                except Exception:
                    pass
                items.append(item)

            if conflicts is not None:
                self._warn_about_conflicts(
                    conflicts,
                    resolver_variant=self.determine_resolver_variant(options),
                )

            installed_desc = " ".join(items)
            if installed_desc:
                write_output(
                    "Successfully installed %s",
                    installed_desc,
                )
        except OSError as error:
            show_traceback = self.verbosity >= 1

            message = create_os_error_message(
                error,
                show_traceback,
                options.use_user_site,
            )
            logger.error(message, exc_info=show_traceback)

            return ERROR

        if options.target_dir:
            assert target_temp_dir
            self._handle_target_dir(
                options.target_dir, target_temp_dir, options.upgrade
            )
        if options.root_user_action == "warn":
            warn_if_run_as_root()
        return SUCCESS
516
+
517
    def _handle_target_dir(
        self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool
    ) -> None:
        """Move a --target install from its temp staging dir into target_dir.

        Existing entries in target_dir are only replaced when ``upgrade``
        is true; symlinked entries are never replaced automatically.
        """
        ensure_dir(target_dir)

        # Directories whose top-level entries will be moved into target_dir.
        lib_dir_list = []

        # Checking both purelib and platlib directories for installed
        # packages to be moved to target directory
        scheme = get_scheme("", home=target_temp_dir.path)
        purelib_dir = scheme.purelib
        platlib_dir = scheme.platlib
        data_dir = scheme.data

        if os.path.exists(purelib_dir):
            lib_dir_list.append(purelib_dir)
        if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
            lib_dir_list.append(platlib_dir)
        if os.path.exists(data_dir):
            lib_dir_list.append(data_dir)

        for lib_dir in lib_dir_list:
            for item in os.listdir(lib_dir):
                if lib_dir == data_dir:
                    # Skip data entries that live under one of the lib dirs
                    # (they were already covered when those were processed).
                    ddir = os.path.join(data_dir, item)
                    if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
                        continue
                target_item_dir = os.path.join(target_dir, item)
                if os.path.exists(target_item_dir):
                    if not upgrade:
                        logger.warning(
                            "Target directory %s already exists. Specify "
                            "--upgrade to force replacement.",
                            target_item_dir,
                        )
                        continue
                    if os.path.islink(target_item_dir):
                        logger.warning(
                            "Target directory %s already exists and is "
                            "a link. pip will not automatically replace "
                            "links, please remove if replacement is "
                            "desired.",
                            target_item_dir,
                        )
                        continue
                    # Safe to replace: remove the old entry first.
                    if os.path.isdir(target_item_dir):
                        shutil.rmtree(target_item_dir)
                    else:
                        os.remove(target_item_dir)

                shutil.move(os.path.join(lib_dir, item), target_item_dir)
570
+
571
+ def _determine_conflicts(
572
+ self, to_install: List[InstallRequirement]
573
+ ) -> Optional[ConflictDetails]:
574
+ try:
575
+ return check_install_conflicts(to_install)
576
+ except Exception:
577
+ logger.exception(
578
+ "Error while checking for conflicts. Please file an issue on "
579
+ "pip's issue tracker: https://github.com/pypa/pip/issues/new"
580
+ )
581
+ return None
582
+
583
+ def _warn_about_conflicts(
584
+ self, conflict_details: ConflictDetails, resolver_variant: str
585
+ ) -> None:
586
+ package_set, (missing, conflicting) = conflict_details
587
+ if not missing and not conflicting:
588
+ return
589
+
590
+ parts: List[str] = []
591
+ if resolver_variant == "legacy":
592
+ parts.append(
593
+ "pip's legacy dependency resolver does not consider dependency "
594
+ "conflicts when selecting packages. This behaviour is the "
595
+ "source of the following dependency conflicts."
596
+ )
597
+ else:
598
+ assert resolver_variant == "resolvelib"
599
+ parts.append(
600
+ "pip's dependency resolver does not currently take into account "
601
+ "all the packages that are installed. This behaviour is the "
602
+ "source of the following dependency conflicts."
603
+ )
604
+
605
+ # NOTE: There is some duplication here, with commands/check.py
606
+ for project_name in missing:
607
+ version = package_set[project_name][0]
608
+ for dependency in missing[project_name]:
609
+ message = (
610
+ f"{project_name} {version} requires {dependency[1]}, "
611
+ "which is not installed."
612
+ )
613
+ parts.append(message)
614
+
615
+ for project_name in conflicting:
616
+ version = package_set[project_name][0]
617
+ for dep_name, dep_version, req in conflicting[project_name]:
618
+ message = (
619
+ "{name} {version} requires {requirement}, but {you} have "
620
+ "{dep_name} {dep_version} which is incompatible."
621
+ ).format(
622
+ name=project_name,
623
+ version=version,
624
+ requirement=req,
625
+ dep_name=dep_name,
626
+ dep_version=dep_version,
627
+ you=("you" if resolver_variant == "resolvelib" else "you'll"),
628
+ )
629
+ parts.append(message)
630
+
631
+ logger.critical("\n".join(parts))
632
+
633
+
634
def get_lib_location_guesses(
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> List[str]:
    """Return the purelib and platlib directories for the given scheme args."""
    scheme = get_scheme(
        "", user=user, home=home, root=root, isolated=isolated, prefix=prefix
    )
    return [scheme.purelib, scheme.platlib]
650
+
651
+
652
def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
    """Check that every candidate site-packages directory is writable."""
    candidate_dirs = set(get_lib_location_guesses(root=root, isolated=isolated))
    return all(test_writable_dir(path) for path in candidate_dirs)
657
+
658
+
659
def decide_user_install(
    use_user_site: Optional[bool],
    prefix_path: Optional[str] = None,
    target_dir: Optional[str] = None,
    root_path: Optional[str] = None,
    isolated_mode: bool = False,
) -> bool:
    """Determine whether to do a user install based on the input options.

    If use_user_site is False, no additional checks are done.
    If use_user_site is True, it is checked for compatibility with other
    options.
    If use_user_site is None, the default behaviour depends on the environment,
    which is provided by the other arguments.
    """
    if use_user_site is not None:
        # In some cases (config from tox), use_user_site can be set to an
        # integer rather than a bool, which 'use_user_site is False'
        # wouldn't catch — test truthiness instead.
        if not use_user_site:
            logger.debug("Non-user install by explicit request")
            return False

        # Explicitly requested: validate against incompatible options.
        if prefix_path:
            raise CommandError(
                "Can not combine '--user' and '--prefix' as they imply "
                "different installation locations"
            )
        if virtualenv_no_global():
            raise InstallationError(
                "Can not perform a '--user' install. User site-packages "
                "are not visible in this virtualenv."
            )
        logger.debug("User install by explicit request")
        return True

    # Neither explicitly requested nor avoided; infer from the environment.

    # user install is incompatible with --prefix/--target
    if prefix_path or target_dir:
        logger.debug("Non-user install due to --prefix or --target option")
        return False

    # If user installs are not enabled, choose a non-user install
    if not site.ENABLE_USER_SITE:
        logger.debug("Non-user install because user site-packages disabled")
        return False

    # If we have permission for a non-user install, do that,
    # otherwise do a user install.
    if site_packages_writable(root=root_path, isolated=isolated_mode):
        logger.debug("Non-user install because site-packages writeable")
        return False

    logger.info(
        "Defaulting to user installation because normal site-packages "
        "is not writeable"
    )
    return True
718
+
719
+
720
def create_os_error_message(
    error: OSError, show_traceback: bool, using_user_site: bool
) -> str:
    """Format an error message for an OSError

    It may occur anytime during the execution of the install command.
    """
    parts = []

    # Mention the error itself only when no traceback will be shown.
    parts.append("Could not install packages due to an OSError")
    if show_traceback:
        parts.append(".")
    else:
        parts.append(": ")
        parts.append(str(error))

    # Split the error indication from a helper message (if any).
    parts[-1] += "\n"

    # Suggest useful actions to the user:
    # (1) using user site-packages or (2) verifying the permissions
    if error.errno == errno.EACCES:
        user_option_part = "Consider using the `--user` option"
        permissions_part = "Check the permissions"

        if running_under_virtualenv() or using_user_site:
            parts.append(permissions_part)
        else:
            parts.extend([user_option_part, " or ", permissions_part.lower()])
        parts.append(".\n")

    # Suggest enabling Long Paths on Windows when the failing path
    # exceeds the classic 260-character limit.
    if (
        WINDOWS
        and error.errno == errno.ENOENT
        and error.filename
        and len(error.filename) > 260
    ):
        parts.append(
            "HINT: This error might have occurred since "
            "this system does not have Windows Long Path "
            "support enabled. You can find information on "
            "how to enable this at "
            "https://pip.pypa.io/warnings/enable-long-paths\n"
        )

    return "".join(parts).strip() + "\n"
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/list.py ADDED
@@ -0,0 +1,368 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import logging
3
+ from optparse import Values
4
+ from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast
5
+
6
+ from pip._vendor.packaging.utils import canonicalize_name
7
+
8
+ from pip._internal.cli import cmdoptions
9
+ from pip._internal.cli.req_command import IndexGroupCommand
10
+ from pip._internal.cli.status_codes import SUCCESS
11
+ from pip._internal.exceptions import CommandError
12
+ from pip._internal.index.collector import LinkCollector
13
+ from pip._internal.index.package_finder import PackageFinder
14
+ from pip._internal.metadata import BaseDistribution, get_environment
15
+ from pip._internal.models.selection_prefs import SelectionPreferences
16
+ from pip._internal.network.session import PipSession
17
+ from pip._internal.utils.compat import stdlib_pkgs
18
+ from pip._internal.utils.misc import tabulate, write_output
19
+
20
if TYPE_CHECKING:
    # Type-checking-only subclass; never instantiated at runtime.

    class _DistWithLatestInfo(BaseDistribution):
        """Give the distribution object a couple of extra fields.

        These will be populated during ``get_outdated()``. This is dirty but
        makes the rest of the code much cleaner.
        """

        latest_version: DistributionVersion
        latest_filetype: str

    _ProcessedDists = Sequence[_DistWithLatestInfo]
+
35
+
36
+ logger = logging.getLogger(__name__)
37
+
38
+
39
class ListCommand(IndexGroupCommand):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """

    ignore_require_venv = True
    usage = """
      %prog [options]"""

    def add_options(self) -> None:
        """Register the command-line options accepted by ``pip list``."""
        self.cmd_opts.add_option(
            "-o",
            "--outdated",
            action="store_true",
            default=False,
            help="List outdated packages",
        )
        self.cmd_opts.add_option(
            "-u",
            "--uptodate",
            action="store_true",
            default=False,
            help="List uptodate packages",
        )
        self.cmd_opts.add_option(
            "-e",
            "--editable",
            action="store_true",
            default=False,
            help="List editable projects.",
        )
        self.cmd_opts.add_option(
            "-l",
            "--local",
            action="store_true",
            default=False,
            help=(
                "If in a virtualenv that has global access, do not list "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            "--pre",
            action="store_true",
            default=False,
            help=(
                "Include pre-release and development versions. By default, "
                "pip only finds stable versions."
            ),
        )

        self.cmd_opts.add_option(
            "--format",
            action="store",
            dest="list_format",
            default="columns",
            choices=("columns", "freeze", "json"),
            help=(
                "Select the output format among: columns (default), freeze, or json. "
                "The 'freeze' format cannot be used with the --outdated option."
            ),
        )

        self.cmd_opts.add_option(
            "--not-required",
            action="store_true",
            dest="not_required",
            help="List packages that are not dependencies of installed packages.",
        )

        # --exclude-editable/--include-editable share one dest; the last
        # one given on the command line wins (default: include).
        self.cmd_opts.add_option(
            "--exclude-editable",
            action="store_false",
            dest="include_editable",
            help="Exclude editable package from output.",
        )
        self.cmd_opts.add_option(
            "--include-editable",
            action="store_true",
            dest="include_editable",
            help="Include editable package from output.",
            default=True,
        )
        self.cmd_opts.add_option(cmdoptions.list_exclude())
        index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    def _build_package_finder(
        self, options: Values, session: PipSession
    ) -> PackageFinder:
        """
        Create a package finder appropriate to this list command.
        """
        link_collector = LinkCollector.create(session, options=options)

        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
        )

    def run(self, options: Values, args: List[str]) -> int:
        """Collect, filter, and print the installed-distribution listing."""
        if options.outdated and options.uptodate:
            raise CommandError("Options --outdated and --uptodate cannot be combined.")

        if options.outdated and options.list_format == "freeze":
            raise CommandError(
                "List format 'freeze' cannot be used with the --outdated option."
            )

        cmdoptions.check_list_path_option(options)

        # Always hide stdlib packages, plus any user-requested exclusions.
        skip = set(stdlib_pkgs)
        if options.excludes:
            skip.update(canonicalize_name(n) for n in options.excludes)

        packages: "_ProcessedDists" = [
            cast("_DistWithLatestInfo", d)
            for d in get_environment(options.path).iter_installed_distributions(
                local_only=options.local,
                user_only=options.user,
                editables_only=options.editable,
                include_editables=options.include_editable,
                skip=skip,
            )
        ]

        # get_not_required must be called first in order to find and
        # filter out all dependencies correctly. Otherwise a package
        # can't be identified as requirement because some parent packages
        # could be filtered out before.
        if options.not_required:
            packages = self.get_not_required(packages, options)

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        self.output_package_listing(packages, options)
        return SUCCESS

    def get_outdated(
        self, packages: "_ProcessedDists", options: Values
    ) -> "_ProcessedDists":
        """Keep only packages whose latest index version is newer."""
        return [
            dist
            for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version > dist.version
        ]

    def get_uptodate(
        self, packages: "_ProcessedDists", options: Values
    ) -> "_ProcessedDists":
        """Keep only packages already at their latest index version."""
        return [
            dist
            for dist in self.iter_packages_latest_infos(packages, options)
            if dist.latest_version == dist.version
        ]

    def get_not_required(
        self, packages: "_ProcessedDists", options: Values
    ) -> "_ProcessedDists":
        """Keep only packages no other installed package depends on."""
        dep_keys = {
            canonicalize_name(dep.name)
            for dist in packages
            for dep in (dist.iter_dependencies() or ())
        }

        # Create a set to remove duplicate packages, and cast it to a list
        # to keep the return type consistent with get_outdated and
        # get_uptodate
        return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys})

    def iter_packages_latest_infos(
        self, packages: "_ProcessedDists", options: Values
    ) -> Generator["_DistWithLatestInfo", None, None]:
        """Yield each package annotated with its best index candidate.

        Packages with no usable candidate on the index are dropped.
        """
        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)

            def latest_info(
                dist: "_DistWithLatestInfo",
            ) -> Optional["_DistWithLatestInfo"]:
                all_candidates = finder.find_all_candidates(dist.canonical_name)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [
                        candidate
                        for candidate in all_candidates
                        if not candidate.version.is_prerelease
                    ]

                evaluator = finder.make_candidate_evaluator(
                    project_name=dist.canonical_name,
                )
                best_candidate = evaluator.sort_best_candidate(all_candidates)
                if best_candidate is None:
                    return None

                remote_version = best_candidate.version
                if best_candidate.link.is_wheel:
                    typ = "wheel"
                else:
                    typ = "sdist"
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                return dist

            for dist in map(latest_info, packages):
                if dist is not None:
                    yield dist

    def output_package_listing(
        self, packages: "_ProcessedDists", options: Values
    ) -> None:
        """Print the packages in the format selected via ``--format``."""
        packages = sorted(
            packages,
            key=lambda dist: dist.canonical_name,
        )
        if options.list_format == "columns" and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == "freeze":
            for dist in packages:
                if options.verbose >= 1:
                    write_output(
                        "%s==%s (%s)", dist.raw_name, dist.version, dist.location
                    )
                else:
                    write_output("%s==%s", dist.raw_name, dist.version)
        elif options.list_format == "json":
            write_output(format_for_json(packages, options))

    def output_package_listing_columns(
        self, data: List[List[str]], header: List[str]
    ) -> None:
        """Render rows as aligned columns with a dashed separator line."""
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join("-" * x for x in sizes))

        for val in pkg_strings:
            write_output(val)
+ write_output(val)
304
+
305
+
306
def format_for_columns(
    pkgs: "_ProcessedDists", options: Values
) -> Tuple[List[List[str]], List[str]]:
    """
    Convert the package data into something usable
    by output_package_listing_columns.
    """
    running_outdated = options.outdated
    has_editables = any(pkg.editable for pkg in pkgs)

    header = ["Package", "Version"]
    if running_outdated:
        header.extend(["Latest", "Type"])
    if has_editables:
        header.append("Editable project location")
    if options.verbose >= 1:
        header.append("Location")
        header.append("Installer")

    data = []
    for proj in pkgs:
        row = [proj.raw_name, str(proj.version)]

        # On the 'outdated' listing, also show the newest release and
        # whether it ships as a wheel or an sdist.
        if running_outdated:
            row.extend([str(proj.latest_version), proj.latest_filetype])

        if has_editables:
            row.append(proj.editable_project_location or "")

        if options.verbose >= 1:
            row.extend([proj.location or "", proj.installer])

        data.append(row)

    return data, header
349
+
350
+
351
def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
    """Serialise the package listing to a JSON array of objects."""

    def _serialise(dist: "_DistWithLatestInfo") -> dict:
        # Base fields are always present; the rest depend on options.
        info = {
            "name": dist.raw_name,
            "version": str(dist.version),
        }
        if options.verbose >= 1:
            info["location"] = dist.location or ""
            info["installer"] = dist.installer
        if options.outdated:
            info["latest_version"] = str(dist.latest_version)
            info["latest_filetype"] = dist.latest_filetype
        editable_location = dist.editable_project_location
        if editable_location:
            info["editable_project_location"] = editable_location
        return info

    return json.dumps([_serialise(dist) for dist in packages])
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/search.py ADDED
@@ -0,0 +1,174 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import shutil
3
+ import sys
4
+ import textwrap
5
+ import xmlrpc.client
6
+ from collections import OrderedDict
7
+ from optparse import Values
8
+ from typing import TYPE_CHECKING, Dict, List, Optional
9
+
10
+ from pip._vendor.packaging.version import parse as parse_version
11
+
12
+ from pip._internal.cli.base_command import Command
13
+ from pip._internal.cli.req_command import SessionCommandMixin
14
+ from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
15
+ from pip._internal.exceptions import CommandError
16
+ from pip._internal.metadata import get_default_environment
17
+ from pip._internal.models.index import PyPI
18
+ from pip._internal.network.xmlrpc import PipXmlrpcTransport
19
+ from pip._internal.utils.logging import indent_log
20
+ from pip._internal.utils.misc import write_output
21
+
22
if TYPE_CHECKING:
    from typing import TypedDict

    # Shape of one entry produced by transform_hits(): one package with
    # all of its versions collapsed into a single record.
    class TransformedHit(TypedDict):
        name: str
        summary: str
        versions: List[str]
29
+
30
+
31
+ logger = logging.getLogger(__name__)
32
+
33
+
34
class SearchCommand(Command, SessionCommandMixin):
    """Search for PyPI packages whose name or summary contains <query>."""

    usage = """
      %prog [options] <query>"""
    ignore_require_venv = True

    def add_options(self) -> None:
        """Register the ``--index`` option."""
        self.cmd_opts.add_option(
            "-i",
            "--index",
            dest="index",
            metavar="URL",
            default=PyPI.pypi_url,
            help="Base URL of Python Package Index (default %default)",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Query the index, print the matches, and map them to an exit code."""
        if not args:
            raise CommandError("Missing required argument (search query).")
        query = args
        pypi_hits = self.search(query, options)
        hits = transform_hits(pypi_hits)

        # Only wrap summaries when stdout is attached to a real terminal.
        terminal_width = None
        if sys.stdout.isatty():
            terminal_width = shutil.get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
        """Run the XML-RPC ``search`` call against the configured index.

        Raises CommandError when the server reports a fault.
        """
        index_url = options.index

        session = self.get_default_session(options)

        transport = PipXmlrpcTransport(index_url, session)
        pypi = xmlrpc.client.ServerProxy(index_url, transport)
        try:
            # Match on either field; "or" tells the server to OR-combine.
            hits = pypi.search({"name": query, "summary": query}, "or")
        except xmlrpc.client.Fault as fault:
            message = "XMLRPC request failed [code: {code}]\n{string}".format(
                code=fault.faultCode,
                string=fault.faultString,
            )
            raise CommandError(message)
        assert isinstance(hits, list)
        return hits
86
+
87
+
88
def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.
    """
    packages: Dict[str, "TransformedHit"] = OrderedDict()
    for hit in hits:
        name = hit["name"]
        summary = hit["summary"]
        version = hit["version"]

        entry = packages.get(name)
        if entry is None:
            packages[name] = {
                "name": name,
                "summary": summary,
                "versions": [version],
            }
            continue

        entry["versions"].append(version)
        # Keep the summary belonging to the highest version seen so far.
        if version == highest_version(entry["versions"]):
            entry["summary"] = summary

    return list(packages.values())
114
+
115
+
116
def print_dist_installation_info(name: str, latest: str) -> None:
    """If *name* is installed, print its version relative to *latest*."""
    env = get_default_environment()
    dist = env.get_distribution(name)
    if dist is None:
        return
    with indent_log():
        if dist.version == latest:
            write_output("INSTALLED: %s (latest)", dist.version)
            return
        write_output("INSTALLED: %s", dist.version)
        if parse_version(latest).pre:
            write_output(
                "LATEST: %s (pre-release; install"
                " with `pip install --pre`)",
                latest,
            )
        else:
            write_output("LATEST: %s", latest)
133
+
134
+
135
def print_results(
    hits: List["TransformedHit"],
    name_column_width: Optional[int] = None,
    terminal_width: Optional[int] = None,
) -> None:
    """Print one line per hit, wrapping summaries to the terminal width."""
    if not hits:
        return

    if name_column_width is None:
        # Widest "name (version)" pair, plus padding.
        widths = [
            len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
            for hit in hits
        ]
        name_column_width = max(widths) + 4

    for hit in hits:
        name = hit["name"]
        summary = hit["summary"] or ""
        latest = highest_version(hit.get("versions", ["-"]))

        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                wrapped = textwrap.wrap(summary, target_width)
                summary = ("\n" + " " * (name_column_width + 3)).join(wrapped)

        name_latest = f"{name} ({latest})"
        line = f"{name_latest:{name_column_width}} - {summary}"
        try:
            write_output(line)
            print_dist_installation_info(name, latest)
        except UnicodeEncodeError:
            pass
171
+
172
+
173
def highest_version(versions: List[str]) -> str:
    """Return the element of *versions* that parses as the largest version.

    Ties keep the earliest occurrence, matching ``max`` semantics.
    """
    best = versions[0]
    for candidate in versions[1:]:
        if parse_version(candidate) > parse_version(best):
            best = candidate
    return best
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/show.py ADDED
@@ -0,0 +1,189 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from optparse import Values
3
+ from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional
4
+
5
+ from pip._vendor.packaging.utils import canonicalize_name
6
+
7
+ from pip._internal.cli.base_command import Command
8
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
9
+ from pip._internal.metadata import BaseDistribution, get_default_environment
10
+ from pip._internal.utils.misc import write_output
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
class ShowCommand(Command):
    """
    Show information about one or more installed packages.

    The output is in RFC-compliant mail header format.
    """

    usage = """
      %prog [options] <package> ..."""
    ignore_require_venv = True

    def add_options(self) -> None:
        """Register ``pip show``'s single option, ``-f/--files``."""
        self.cmd_opts.add_option(
            "-f",
            "--files",
            dest="files",
            action="store_true",
            default=False,
            help="Show the full list of installed files for each package.",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Look up each named package and print its metadata.

        Returns ERROR when no names were given or nothing was printed.
        """
        if not args:
            logger.warning("ERROR: Please provide a package name or names.")
            return ERROR
        query = args

        results = search_packages_info(query)
        if not print_results(
            results, list_files=options.files, verbose=options.verbose
        ):
            return ERROR
        return SUCCESS
50
+
51
+
52
class _PackageInfo(NamedTuple):
    """Flattened metadata for one installed distribution, ready to print."""

    name: str
    version: str
    location: str
    editable_project_location: Optional[str]
    requires: List[str]  # names of direct dependencies, sorted
    required_by: List[str]  # installed packages that depend on this one
    installer: str
    metadata_version: str
    classifiers: List[str]
    summary: str
    homepage: str
    project_urls: List[str]
    author: str
    author_email: str
    license: str
    entry_points: List[str]
    files: Optional[List[str]]  # None when no file record is available
70
+
71
+
72
def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.
    """
    env = get_default_environment()

    installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
    query_names = [canonicalize_name(name) for name in query]
    # Warn once, up front, about every queried name that has no match.
    missing = sorted(
        [name for name, pkg in zip(query, query_names) if pkg not in installed]
    )
    if missing:
        logger.warning("Package(s) not found: %s", ", ".join(missing))

    def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
        # Reverse-dependency scan across every installed distribution.
        return (
            dist.metadata["Name"] or "UNKNOWN"
            for dist in installed.values()
            if current_dist.canonical_name
            in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
        )

    for query_name in query_names:
        try:
            dist = installed[query_name]
        except KeyError:
            continue  # already reported via the `missing` warning above

        requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower)
        required_by = sorted(_get_requiring_packages(dist), key=str.lower)

        try:
            entry_points_text = dist.read_text("entry_points.txt")
            entry_points = entry_points_text.splitlines(keepends=False)
        except FileNotFoundError:
            entry_points = []

        # None (vs an empty list) signals that no file record exists at all.
        files_iter = dist.iter_declared_entries()
        if files_iter is None:
            files: Optional[List[str]] = None
        else:
            files = sorted(files_iter)

        metadata = dist.metadata

        yield _PackageInfo(
            name=dist.raw_name,
            version=str(dist.version),
            location=dist.location or "",
            editable_project_location=dist.editable_project_location,
            requires=requires,
            required_by=required_by,
            installer=dist.installer,
            metadata_version=dist.metadata_version or "",
            classifiers=metadata.get_all("Classifier", []),
            summary=metadata.get("Summary", ""),
            homepage=metadata.get("Home-page", ""),
            project_urls=metadata.get_all("Project-URL", []),
            author=metadata.get("Author", ""),
            author_email=metadata.get("Author-email", ""),
            license=metadata.get("License", ""),
            entry_points=entry_points,
            files=files,
        )
139
+
140
+
141
def print_results(
    distributions: Iterable[_PackageInfo],
    list_files: bool,
    verbose: bool,
) -> bool:
    """
    Print the information from installed distributions found.

    Returns True if at least one distribution was printed.
    """
    results_printed = False
    for i, dist in enumerate(distributions):
        results_printed = True
        # Separate consecutive packages with a "---" divider.
        if i > 0:
            write_output("---")

        write_output("Name: %s", dist.name)
        write_output("Version: %s", dist.version)
        write_output("Summary: %s", dist.summary)
        write_output("Home-page: %s", dist.homepage)
        write_output("Author: %s", dist.author)
        write_output("Author-email: %s", dist.author_email)
        write_output("License: %s", dist.license)
        write_output("Location: %s", dist.location)
        if dist.editable_project_location is not None:
            write_output(
                "Editable project location: %s", dist.editable_project_location
            )
        write_output("Requires: %s", ", ".join(dist.requires))
        write_output("Required-by: %s", ", ".join(dist.required_by))

        if verbose:
            write_output("Metadata-Version: %s", dist.metadata_version)
            write_output("Installer: %s", dist.installer)
            write_output("Classifiers:")
            for classifier in dist.classifiers:
                write_output("  %s", classifier)
            write_output("Entry-points:")
            for entry in dist.entry_points:
                write_output("  %s", entry.strip())
            write_output("Project-URLs:")
            for project_url in dist.project_urls:
                write_output("  %s", project_url)
        if list_files:
            write_output("Files:")
            if dist.files is None:
                write_output("Cannot locate RECORD or installed-files.txt")
            else:
                for line in dist.files:
                    write_output("  %s", line.strip())
    return results_printed
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/uninstall.py ADDED
@@ -0,0 +1,113 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from optparse import Values
3
+ from typing import List
4
+
5
+ from pip._vendor.packaging.utils import canonicalize_name
6
+
7
+ from pip._internal.cli import cmdoptions
8
+ from pip._internal.cli.base_command import Command
9
+ from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
10
+ from pip._internal.cli.status_codes import SUCCESS
11
+ from pip._internal.exceptions import InstallationError
12
+ from pip._internal.req import parse_requirements
13
+ from pip._internal.req.constructors import (
14
+ install_req_from_line,
15
+ install_req_from_parsed_requirement,
16
+ )
17
+ from pip._internal.utils.misc import (
18
+ check_externally_managed,
19
+ protect_pip_from_modification_on_windows,
20
+ )
21
+
22
+ logger = logging.getLogger(__name__)
23
+
24
+
25
+ class UninstallCommand(Command, SessionCommandMixin):
26
+ """
27
+ Uninstall packages.
28
+
29
+ pip is able to uninstall most installed packages. Known exceptions are:
30
+
31
+ - Pure distutils packages installed with ``python setup.py install``, which
32
+ leave behind no metadata to determine what files were installed.
33
+ - Script wrappers installed by ``python setup.py develop``.
34
+ """
35
+
36
+ usage = """
37
+ %prog [options] <package> ...
38
+ %prog [options] -r <requirements file> ..."""
39
+
40
+ def add_options(self) -> None:
41
+ self.cmd_opts.add_option(
42
+ "-r",
43
+ "--requirement",
44
+ dest="requirements",
45
+ action="append",
46
+ default=[],
47
+ metavar="file",
48
+ help=(
49
+ "Uninstall all the packages listed in the given requirements "
50
+ "file. This option can be used multiple times."
51
+ ),
52
+ )
53
+ self.cmd_opts.add_option(
54
+ "-y",
55
+ "--yes",
56
+ dest="yes",
57
+ action="store_true",
58
+ help="Don't ask for confirmation of uninstall deletions.",
59
+ )
60
+ self.cmd_opts.add_option(cmdoptions.root_user_action())
61
+ self.cmd_opts.add_option(cmdoptions.override_externally_managed())
62
+ self.parser.insert_option_group(0, self.cmd_opts)
63
+
64
+ def run(self, options: Values, args: List[str]) -> int:
65
+ session = self.get_default_session(options)
66
+
67
+ reqs_to_uninstall = {}
68
+ for name in args:
69
+ req = install_req_from_line(
70
+ name,
71
+ isolated=options.isolated_mode,
72
+ )
73
+ if req.name:
74
+ reqs_to_uninstall[canonicalize_name(req.name)] = req
75
+ else:
76
+ logger.warning(
77
+ "Invalid requirement: %r ignored -"
78
+ " the uninstall command expects named"
79
+ " requirements.",
80
+ name,
81
+ )
82
+ for filename in options.requirements:
83
+ for parsed_req in parse_requirements(
84
+ filename, options=options, session=session
85
+ ):
86
+ req = install_req_from_parsed_requirement(
87
+ parsed_req, isolated=options.isolated_mode
88
+ )
89
+ if req.name:
90
+ reqs_to_uninstall[canonicalize_name(req.name)] = req
91
+ if not reqs_to_uninstall:
92
+ raise InstallationError(
93
+ f"You must give at least one requirement to {self.name} (see "
94
+ f'"pip help {self.name}")'
95
+ )
96
+
97
+ if not options.override_externally_managed:
98
+ check_externally_managed()
99
+
100
+ protect_pip_from_modification_on_windows(
101
+ modifying_pip="pip" in reqs_to_uninstall
102
+ )
103
+
104
+ for req in reqs_to_uninstall.values():
105
+ uninstall_pathset = req.uninstall(
106
+ auto_confirm=options.yes,
107
+ verbose=self.verbosity > 0,
108
+ )
109
+ if uninstall_pathset:
110
+ uninstall_pathset.commit()
111
+ if options.root_user_action == "warn":
112
+ warn_if_run_as_root()
113
+ return SUCCESS
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/wheel.py ADDED
@@ -0,0 +1,183 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ import shutil
4
+ from optparse import Values
5
+ from typing import List
6
+
7
+ from pip._internal.cache import WheelCache
8
+ from pip._internal.cli import cmdoptions
9
+ from pip._internal.cli.req_command import RequirementCommand, with_cleanup
10
+ from pip._internal.cli.status_codes import SUCCESS
11
+ from pip._internal.exceptions import CommandError
12
+ from pip._internal.operations.build.build_tracker import get_build_tracker
13
+ from pip._internal.req.req_install import (
14
+ InstallRequirement,
15
+ check_legacy_setup_py_options,
16
+ )
17
+ from pip._internal.utils.misc import ensure_dir, normalize_path
18
+ from pip._internal.utils.temp_dir import TempDirectory
19
+ from pip._internal.wheel_builder import build, should_build_for_wheel_command
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+
24
+ class WheelCommand(RequirementCommand):
25
+ """
26
+ Build Wheel archives for your requirements and dependencies.
27
+
28
+ Wheel is a built-package format, and offers the advantage of not
29
+ recompiling your software during every install. For more details, see the
30
+ wheel docs: https://wheel.readthedocs.io/en/latest/
31
+
32
+ 'pip wheel' uses the build system interface as described here:
33
+ https://pip.pypa.io/en/stable/reference/build-system/
34
+
35
+ """
36
+
37
+ usage = """
38
+ %prog [options] <requirement specifier> ...
39
+ %prog [options] -r <requirements file> ...
40
+ %prog [options] [-e] <vcs project url> ...
41
+ %prog [options] [-e] <local project path> ...
42
+ %prog [options] <archive url/path> ..."""
43
+
44
+ def add_options(self) -> None:
45
+ self.cmd_opts.add_option(
46
+ "-w",
47
+ "--wheel-dir",
48
+ dest="wheel_dir",
49
+ metavar="dir",
50
+ default=os.curdir,
51
+ help=(
52
+ "Build wheels into <dir>, where the default is the "
53
+ "current working directory."
54
+ ),
55
+ )
56
+ self.cmd_opts.add_option(cmdoptions.no_binary())
57
+ self.cmd_opts.add_option(cmdoptions.only_binary())
58
+ self.cmd_opts.add_option(cmdoptions.prefer_binary())
59
+ self.cmd_opts.add_option(cmdoptions.no_build_isolation())
60
+ self.cmd_opts.add_option(cmdoptions.use_pep517())
61
+ self.cmd_opts.add_option(cmdoptions.no_use_pep517())
62
+ self.cmd_opts.add_option(cmdoptions.check_build_deps())
63
+ self.cmd_opts.add_option(cmdoptions.constraints())
64
+ self.cmd_opts.add_option(cmdoptions.editable())
65
+ self.cmd_opts.add_option(cmdoptions.requirements())
66
+ self.cmd_opts.add_option(cmdoptions.src())
67
+ self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
68
+ self.cmd_opts.add_option(cmdoptions.no_deps())
69
+ self.cmd_opts.add_option(cmdoptions.progress_bar())
70
+
71
+ self.cmd_opts.add_option(
72
+ "--no-verify",
73
+ dest="no_verify",
74
+ action="store_true",
75
+ default=False,
76
+ help="Don't verify if built wheel is valid.",
77
+ )
78
+
79
+ self.cmd_opts.add_option(cmdoptions.config_settings())
80
+ self.cmd_opts.add_option(cmdoptions.build_options())
81
+ self.cmd_opts.add_option(cmdoptions.global_options())
82
+
83
+ self.cmd_opts.add_option(
84
+ "--pre",
85
+ action="store_true",
86
+ default=False,
87
+ help=(
88
+ "Include pre-release and development versions. By default, "
89
+ "pip only finds stable versions."
90
+ ),
91
+ )
92
+
93
+ self.cmd_opts.add_option(cmdoptions.require_hashes())
94
+
95
+ index_opts = cmdoptions.make_option_group(
96
+ cmdoptions.index_group,
97
+ self.parser,
98
+ )
99
+
100
+ self.parser.insert_option_group(0, index_opts)
101
+ self.parser.insert_option_group(0, self.cmd_opts)
102
+
103
+ @with_cleanup
104
+ def run(self, options: Values, args: List[str]) -> int:
105
+ session = self.get_default_session(options)
106
+
107
+ finder = self._build_package_finder(options, session)
108
+
109
+ options.wheel_dir = normalize_path(options.wheel_dir)
110
+ ensure_dir(options.wheel_dir)
111
+
112
+ build_tracker = self.enter_context(get_build_tracker())
113
+
114
+ directory = TempDirectory(
115
+ delete=not options.no_clean,
116
+ kind="wheel",
117
+ globally_managed=True,
118
+ )
119
+
120
+ reqs = self.get_requirements(args, options, finder, session)
121
+ check_legacy_setup_py_options(options, reqs)
122
+
123
+ wheel_cache = WheelCache(options.cache_dir)
124
+
125
+ preparer = self.make_requirement_preparer(
126
+ temp_build_dir=directory,
127
+ options=options,
128
+ build_tracker=build_tracker,
129
+ session=session,
130
+ finder=finder,
131
+ download_dir=options.wheel_dir,
132
+ use_user_site=False,
133
+ verbosity=self.verbosity,
134
+ )
135
+
136
+ resolver = self.make_resolver(
137
+ preparer=preparer,
138
+ finder=finder,
139
+ options=options,
140
+ wheel_cache=wheel_cache,
141
+ ignore_requires_python=options.ignore_requires_python,
142
+ use_pep517=options.use_pep517,
143
+ )
144
+
145
+ self.trace_basic_info(finder)
146
+
147
+ requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
148
+
149
+ reqs_to_build: List[InstallRequirement] = []
150
+ for req in requirement_set.requirements.values():
151
+ if req.is_wheel:
152
+ preparer.save_linked_requirement(req)
153
+ elif should_build_for_wheel_command(req):
154
+ reqs_to_build.append(req)
155
+
156
+ preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
157
+ requirement_set.warn_legacy_versions_and_specifiers()
158
+
159
+ # build wheels
160
+ build_successes, build_failures = build(
161
+ reqs_to_build,
162
+ wheel_cache=wheel_cache,
163
+ verify=(not options.no_verify),
164
+ build_options=options.build_options or [],
165
+ global_options=options.global_options or [],
166
+ )
167
+ for req in build_successes:
168
+ assert req.link and req.link.is_wheel
169
+ assert req.local_file_path
170
+ # copy from cache to target directory
171
+ try:
172
+ shutil.copy(req.local_file_path, options.wheel_dir)
173
+ except OSError as e:
174
+ logger.warning(
175
+ "Building wheel for %s failed: %s",
176
+ req.name,
177
+ e,
178
+ )
179
+ build_failures.append(req)
180
+ if len(build_failures) != 0:
181
+ raise CommandError("Failed to build one or more wheels")
182
+
183
+ return SUCCESS
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/__init__.py ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pip._internal.distributions.base import AbstractDistribution
2
+ from pip._internal.distributions.sdist import SourceDistribution
3
+ from pip._internal.distributions.wheel import WheelDistribution
4
+ from pip._internal.req.req_install import InstallRequirement
5
+
6
+
7
+ def make_distribution_for_install_requirement(
8
+ install_req: InstallRequirement,
9
+ ) -> AbstractDistribution:
10
+ """Returns a Distribution for the given InstallRequirement"""
11
+ # Editable requirements will always be source distributions. They use the
12
+ # legacy logic until we create a modern standard for them.
13
+ if install_req.editable:
14
+ return SourceDistribution(install_req)
15
+
16
+ # If it's a wheel, it's a WheelDistribution
17
+ if install_req.is_wheel:
18
+ return WheelDistribution(install_req)
19
+
20
+ # Otherwise, a SourceDistribution
21
+ return SourceDistribution(install_req)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/base.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import abc
2
+ from typing import Optional
3
+
4
+ from pip._internal.index.package_finder import PackageFinder
5
+ from pip._internal.metadata.base import BaseDistribution
6
+ from pip._internal.req import InstallRequirement
7
+
8
+
9
+ class AbstractDistribution(metaclass=abc.ABCMeta):
10
+ """A base class for handling installable artifacts.
11
+
12
+ The requirements for anything installable are as follows:
13
+
14
+ - we must be able to determine the requirement name
15
+ (or we can't correctly handle the non-upgrade case).
16
+
17
+ - for packages with setup requirements, we must also be able
18
+ to determine their requirements without installing additional
19
+ packages (for the same reason as run-time dependencies)
20
+
21
+ - we must be able to create a Distribution object exposing the
22
+ above metadata.
23
+
24
+ - if we need to do work in the build tracker, we must be able to generate a unique
25
+ string to identify the requirement in the build tracker.
26
+ """
27
+
28
+ def __init__(self, req: InstallRequirement) -> None:
29
+ super().__init__()
30
+ self.req = req
31
+
32
+ @abc.abstractproperty
33
+ def build_tracker_id(self) -> Optional[str]:
34
+ """A string that uniquely identifies this requirement to the build tracker.
35
+
36
+ If None, then this dist has no work to do in the build tracker, and
37
+ ``.prepare_distribution_metadata()`` will not be called."""
38
+ raise NotImplementedError()
39
+
40
+ @abc.abstractmethod
41
+ def get_metadata_distribution(self) -> BaseDistribution:
42
+ raise NotImplementedError()
43
+
44
+ @abc.abstractmethod
45
+ def prepare_distribution_metadata(
46
+ self,
47
+ finder: PackageFinder,
48
+ build_isolation: bool,
49
+ check_build_deps: bool,
50
+ ) -> None:
51
+ raise NotImplementedError()
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/installed.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional
2
+
3
+ from pip._internal.distributions.base import AbstractDistribution
4
+ from pip._internal.index.package_finder import PackageFinder
5
+ from pip._internal.metadata import BaseDistribution
6
+
7
+
8
+ class InstalledDistribution(AbstractDistribution):
9
+ """Represents an installed package.
10
+
11
+ This does not need any preparation as the required information has already
12
+ been computed.
13
+ """
14
+
15
+ @property
16
+ def build_tracker_id(self) -> Optional[str]:
17
+ return None
18
+
19
+ def get_metadata_distribution(self) -> BaseDistribution:
20
+ assert self.req.satisfied_by is not None, "not actually installed"
21
+ return self.req.satisfied_by
22
+
23
+ def prepare_distribution_metadata(
24
+ self,
25
+ finder: PackageFinder,
26
+ build_isolation: bool,
27
+ check_build_deps: bool,
28
+ ) -> None:
29
+ pass
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/sdist.py ADDED
@@ -0,0 +1,156 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from typing import Iterable, Optional, Set, Tuple
3
+
4
+ from pip._internal.build_env import BuildEnvironment
5
+ from pip._internal.distributions.base import AbstractDistribution
6
+ from pip._internal.exceptions import InstallationError
7
+ from pip._internal.index.package_finder import PackageFinder
8
+ from pip._internal.metadata import BaseDistribution
9
+ from pip._internal.utils.subprocess import runner_with_spinner_message
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
+ class SourceDistribution(AbstractDistribution):
15
+ """Represents a source distribution.
16
+
17
+ The preparation step for these needs metadata for the packages to be
18
+ generated, either using PEP 517 or using the legacy `setup.py egg_info`.
19
+ """
20
+
21
+ @property
22
+ def build_tracker_id(self) -> Optional[str]:
23
+ """Identify this requirement uniquely by its link."""
24
+ assert self.req.link
25
+ return self.req.link.url_without_fragment
26
+
27
+ def get_metadata_distribution(self) -> BaseDistribution:
28
+ return self.req.get_dist()
29
+
30
+ def prepare_distribution_metadata(
31
+ self,
32
+ finder: PackageFinder,
33
+ build_isolation: bool,
34
+ check_build_deps: bool,
35
+ ) -> None:
36
+ # Load pyproject.toml, to determine whether PEP 517 is to be used
37
+ self.req.load_pyproject_toml()
38
+
39
+ # Set up the build isolation, if this requirement should be isolated
40
+ should_isolate = self.req.use_pep517 and build_isolation
41
+ if should_isolate:
42
+ # Setup an isolated environment and install the build backend static
43
+ # requirements in it.
44
+ self._prepare_build_backend(finder)
45
+ # Check that if the requirement is editable, it either supports PEP 660 or
46
+ # has a setup.py or a setup.cfg. This cannot be done earlier because we need
47
+ # to setup the build backend to verify it supports build_editable, nor can
48
+ # it be done later, because we want to avoid installing build requirements
49
+ # needlessly. Doing it here also works around setuptools generating
50
+ # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
51
+ # without setup.py nor setup.cfg.
52
+ self.req.isolated_editable_sanity_check()
53
+ # Install the dynamic build requirements.
54
+ self._install_build_reqs(finder)
55
+ # Check if the current environment provides build dependencies
56
+ should_check_deps = self.req.use_pep517 and check_build_deps
57
+ if should_check_deps:
58
+ pyproject_requires = self.req.pyproject_requires
59
+ assert pyproject_requires is not None
60
+ conflicting, missing = self.req.build_env.check_requirements(
61
+ pyproject_requires
62
+ )
63
+ if conflicting:
64
+ self._raise_conflicts("the backend dependencies", conflicting)
65
+ if missing:
66
+ self._raise_missing_reqs(missing)
67
+ self.req.prepare_metadata()
68
+
69
+ def _prepare_build_backend(self, finder: PackageFinder) -> None:
70
+ # Isolate in a BuildEnvironment and install the build-time
71
+ # requirements.
72
+ pyproject_requires = self.req.pyproject_requires
73
+ assert pyproject_requires is not None
74
+
75
+ self.req.build_env = BuildEnvironment()
76
+ self.req.build_env.install_requirements(
77
+ finder, pyproject_requires, "overlay", kind="build dependencies"
78
+ )
79
+ conflicting, missing = self.req.build_env.check_requirements(
80
+ self.req.requirements_to_check
81
+ )
82
+ if conflicting:
83
+ self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
84
+ if missing:
85
+ logger.warning(
86
+ "Missing build requirements in pyproject.toml for %s.",
87
+ self.req,
88
+ )
89
+ logger.warning(
90
+ "The project does not specify a build backend, and "
91
+ "pip cannot fall back to setuptools without %s.",
92
+ " and ".join(map(repr, sorted(missing))),
93
+ )
94
+
95
+ def _get_build_requires_wheel(self) -> Iterable[str]:
96
+ with self.req.build_env:
97
+ runner = runner_with_spinner_message("Getting requirements to build wheel")
98
+ backend = self.req.pep517_backend
99
+ assert backend is not None
100
+ with backend.subprocess_runner(runner):
101
+ return backend.get_requires_for_build_wheel()
102
+
103
+ def _get_build_requires_editable(self) -> Iterable[str]:
104
+ with self.req.build_env:
105
+ runner = runner_with_spinner_message(
106
+ "Getting requirements to build editable"
107
+ )
108
+ backend = self.req.pep517_backend
109
+ assert backend is not None
110
+ with backend.subprocess_runner(runner):
111
+ return backend.get_requires_for_build_editable()
112
+
113
+ def _install_build_reqs(self, finder: PackageFinder) -> None:
114
+ # Install any extra build dependencies that the backend requests.
115
+ # This must be done in a second pass, as the pyproject.toml
116
+ # dependencies must be installed before we can call the backend.
117
+ if (
118
+ self.req.editable
119
+ and self.req.permit_editable_wheels
120
+ and self.req.supports_pyproject_editable()
121
+ ):
122
+ build_reqs = self._get_build_requires_editable()
123
+ else:
124
+ build_reqs = self._get_build_requires_wheel()
125
+ conflicting, missing = self.req.build_env.check_requirements(build_reqs)
126
+ if conflicting:
127
+ self._raise_conflicts("the backend dependencies", conflicting)
128
+ self.req.build_env.install_requirements(
129
+ finder, missing, "normal", kind="backend dependencies"
130
+ )
131
+
132
+ def _raise_conflicts(
133
+ self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
134
+ ) -> None:
135
+ format_string = (
136
+ "Some build dependencies for {requirement} "
137
+ "conflict with {conflicting_with}: {description}."
138
+ )
139
+ error_message = format_string.format(
140
+ requirement=self.req,
141
+ conflicting_with=conflicting_with,
142
+ description=", ".join(
143
+ f"{installed} is incompatible with {wanted}"
144
+ for installed, wanted in sorted(conflicting_reqs)
145
+ ),
146
+ )
147
+ raise InstallationError(error_message)
148
+
149
+ def _raise_missing_reqs(self, missing: Set[str]) -> None:
150
+ format_string = (
151
+ "Some build dependencies for {requirement} are missing: {missing}."
152
+ )
153
+ error_message = format_string.format(
154
+ requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
155
+ )
156
+ raise InstallationError(error_message)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/distributions/wheel.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional
2
+
3
+ from pip._vendor.packaging.utils import canonicalize_name
4
+
5
+ from pip._internal.distributions.base import AbstractDistribution
6
+ from pip._internal.index.package_finder import PackageFinder
7
+ from pip._internal.metadata import (
8
+ BaseDistribution,
9
+ FilesystemWheel,
10
+ get_wheel_distribution,
11
+ )
12
+
13
+
14
+ class WheelDistribution(AbstractDistribution):
15
+ """Represents a wheel distribution.
16
+
17
+ This does not need any preparation as wheels can be directly unpacked.
18
+ """
19
+
20
+ @property
21
+ def build_tracker_id(self) -> Optional[str]:
22
+ return None
23
+
24
+ def get_metadata_distribution(self) -> BaseDistribution:
25
+ """Loads the metadata from the wheel file into memory and returns a
26
+ Distribution that uses it, not relying on the wheel file or
27
+ requirement.
28
+ """
29
+ assert self.req.local_file_path, "Set as part of preparation during download"
30
+ assert self.req.name, "Wheels are never unnamed"
31
+ wheel = FilesystemWheel(self.req.local_file_path)
32
+ return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
33
+
34
+ def prepare_distribution_metadata(
35
+ self,
36
+ finder: PackageFinder,
37
+ build_isolation: bool,
38
+ check_build_deps: bool,
39
+ ) -> None:
40
+ pass
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ """Index interaction code
2
+ """
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/collector.py ADDED
@@ -0,0 +1,507 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ The main purpose of this module is to expose LinkCollector.collect_sources().
3
+ """
4
+
5
+ import collections
6
+ import email.message
7
+ import functools
8
+ import itertools
9
+ import json
10
+ import logging
11
+ import os
12
+ import urllib.parse
13
+ import urllib.request
14
+ from html.parser import HTMLParser
15
+ from optparse import Values
16
+ from typing import (
17
+ TYPE_CHECKING,
18
+ Callable,
19
+ Dict,
20
+ Iterable,
21
+ List,
22
+ MutableMapping,
23
+ NamedTuple,
24
+ Optional,
25
+ Sequence,
26
+ Tuple,
27
+ Union,
28
+ )
29
+
30
+ from pip._vendor import requests
31
+ from pip._vendor.requests import Response
32
+ from pip._vendor.requests.exceptions import RetryError, SSLError
33
+
34
+ from pip._internal.exceptions import NetworkConnectionError
35
+ from pip._internal.models.link import Link
36
+ from pip._internal.models.search_scope import SearchScope
37
+ from pip._internal.network.session import PipSession
38
+ from pip._internal.network.utils import raise_for_status
39
+ from pip._internal.utils.filetypes import is_archive_file
40
+ from pip._internal.utils.misc import redact_auth_from_url
41
+ from pip._internal.vcs import vcs
42
+
43
+ from .sources import CandidatesFromPage, LinkSource, build_source
44
+
45
+ if TYPE_CHECKING:
46
+ from typing import Protocol
47
+ else:
48
+ Protocol = object
49
+
50
+ logger = logging.getLogger(__name__)
51
+
52
+ ResponseHeaders = MutableMapping[str, str]
53
+
54
+
55
+ def _match_vcs_scheme(url: str) -> Optional[str]:
56
+ """Look for VCS schemes in the URL.
57
+
58
+ Returns the matched VCS scheme, or None if there's no match.
59
+ """
60
+ for scheme in vcs.schemes:
61
+ if url.lower().startswith(scheme) and url[len(scheme)] in "+:":
62
+ return scheme
63
+ return None
64
+
65
+
66
+ class _NotAPIContent(Exception):
67
+ def __init__(self, content_type: str, request_desc: str) -> None:
68
+ super().__init__(content_type, request_desc)
69
+ self.content_type = content_type
70
+ self.request_desc = request_desc
71
+
72
+
73
+ def _ensure_api_header(response: Response) -> None:
74
+ """
75
+ Check the Content-Type header to ensure the response contains a Simple
76
+ API Response.
77
+
78
+ Raises `_NotAPIContent` if the content type is not a valid content-type.
79
+ """
80
+ content_type = response.headers.get("Content-Type", "Unknown")
81
+
82
+ content_type_l = content_type.lower()
83
+ if content_type_l.startswith(
84
+ (
85
+ "text/html",
86
+ "application/vnd.pypi.simple.v1+html",
87
+ "application/vnd.pypi.simple.v1+json",
88
+ )
89
+ ):
90
+ return
91
+
92
+ raise _NotAPIContent(content_type, response.request.method)
93
+
94
+
95
+ class _NotHTTP(Exception):
96
+ pass
97
+
98
+
99
+ def _ensure_api_response(url: str, session: PipSession) -> None:
100
+ """
101
+ Send a HEAD request to the URL, and ensure the response contains a simple
102
+ API Response.
103
+
104
+ Raises `_NotHTTP` if the URL is not available for a HEAD request, or
105
+ `_NotAPIContent` if the content type is not a valid content type.
106
+ """
107
+ scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
108
+ if scheme not in {"http", "https"}:
109
+ raise _NotHTTP()
110
+
111
+ resp = session.head(url, allow_redirects=True)
112
+ raise_for_status(resp)
113
+
114
+ _ensure_api_header(resp)
115
+
116
+
117
+ def _get_simple_response(url: str, session: PipSession) -> Response:
118
+ """Access an Simple API response with GET, and return the response.
119
+
120
+ This consists of three parts:
121
+
122
+ 1. If the URL looks suspiciously like an archive, send a HEAD first to
123
+ check the Content-Type is HTML or Simple API, to avoid downloading a
124
+ large file. Raise `_NotHTTP` if the content type cannot be determined, or
125
+ `_NotAPIContent` if it is not HTML or a Simple API.
126
+ 2. Actually perform the request. Raise HTTP exceptions on network failures.
127
+ 3. Check the Content-Type header to make sure we got a Simple API response,
128
+ and raise `_NotAPIContent` otherwise.
129
+ """
130
+ if is_archive_file(Link(url).filename):
131
+ _ensure_api_response(url, session=session)
132
+
133
+ logger.debug("Getting page %s", redact_auth_from_url(url))
134
+
135
+ resp = session.get(
136
+ url,
137
+ headers={
138
+ "Accept": ", ".join(
139
+ [
140
+ "application/vnd.pypi.simple.v1+json",
141
+ "application/vnd.pypi.simple.v1+html; q=0.1",
142
+ "text/html; q=0.01",
143
+ ]
144
+ ),
145
+ # We don't want to blindly returned cached data for
146
+ # /simple/, because authors generally expecting that
147
+ # twine upload && pip install will function, but if
148
+ # they've done a pip install in the last ~10 minutes
149
+ # it won't. Thus by setting this to zero we will not
150
+ # blindly use any cached data, however the benefit of
151
+ # using max-age=0 instead of no-cache, is that we will
152
+ # still support conditional requests, so we will still
153
+ # minimize traffic sent in cases where the page hasn't
154
+ # changed at all, we will just always incur the round
155
+ # trip for the conditional GET now instead of only
156
+ # once per 10 minutes.
157
+ # For more information, please see pypa/pip#5670.
158
+ "Cache-Control": "max-age=0",
159
+ },
160
+ )
161
+ raise_for_status(resp)
162
+
163
+ # The check for archives above only works if the url ends with
164
+ # something that looks like an archive. However that is not a
165
+ # requirement of an url. Unless we issue a HEAD request on every
166
+ # url we cannot know ahead of time for sure if something is a
167
+ # Simple API response or not. However we can check after we've
168
+ # downloaded it.
169
+ _ensure_api_header(resp)
170
+
171
+ logger.debug(
172
+ "Fetched page %s as %s",
173
+ redact_auth_from_url(url),
174
+ resp.headers.get("Content-Type", "Unknown"),
175
+ )
176
+
177
+ return resp
178
+
179
+
180
+ def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
181
+ """Determine if we have any encoding information in our headers."""
182
+ if headers and "Content-Type" in headers:
183
+ m = email.message.Message()
184
+ m["content-type"] = headers["Content-Type"]
185
+ charset = m.get_param("charset")
186
+ if charset:
187
+ return str(charset)
188
+ return None
189
+
190
+
191
+ class CacheablePageContent:
192
+ def __init__(self, page: "IndexContent") -> None:
193
+ assert page.cache_link_parsing
194
+ self.page = page
195
+
196
+ def __eq__(self, other: object) -> bool:
197
+ return isinstance(other, type(self)) and self.page.url == other.page.url
198
+
199
+ def __hash__(self) -> int:
200
+ return hash(self.page.url)
201
+
202
+
203
+ class ParseLinks(Protocol):
204
+ def __call__(self, page: "IndexContent") -> Iterable[Link]:
205
+ ...
206
+
207
+
208
+ def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
209
+ """
210
+ Given a function that parses an Iterable[Link] from an IndexContent, cache the
211
+ function's result (keyed by CacheablePageContent), unless the IndexContent
212
+ `page` has `page.cache_link_parsing == False`.
213
+ """
214
+
215
+ @functools.lru_cache(maxsize=None)
216
+ def wrapper(cacheable_page: CacheablePageContent) -> List[Link]:
217
+ return list(fn(cacheable_page.page))
218
+
219
+ @functools.wraps(fn)
220
+ def wrapper_wrapper(page: "IndexContent") -> List[Link]:
221
+ if page.cache_link_parsing:
222
+ return wrapper(CacheablePageContent(page))
223
+ return list(fn(page))
224
+
225
+ return wrapper_wrapper
226
+
227
+
228
@with_cached_index_content
def parse_links(page: "IndexContent") -> Iterable[Link]:
    """
    Parse a Simple API's Index Content, and yield its anchor elements as Link objects.

    Handles both the PEP 691 JSON form and the HTML form of the Simple API.
    """
    if page.content_type.lower().startswith("application/vnd.pypi.simple.v1+json"):
        # PEP 691 JSON response: one entry per file.
        data = json.loads(page.content)
        for file in data.get("files", []):
            link = Link.from_json(file, page.url)
            if link is not None:
                yield link
        return

    # HTML response: extract <a> elements, honoring a <base href>.
    parser = HTMLLinkParser(page.url)
    parser.feed(page.content.decode(page.encoding or "utf-8"))

    base_url = parser.base_url or page.url
    for anchor in parser.anchors:
        link = Link.from_element(anchor, page_url=page.url, base_url=base_url)
        if link is not None:
            yield link
255
+
256
+
257
class IndexContent:
    """Represents one response (or page), along with its URL."""

    def __init__(
        self,
        content: bytes,
        content_type: str,
        encoding: Optional[str],
        url: str,
        cache_link_parsing: bool = True,
    ) -> None:
        """
        :param content: the raw response body.
        :param content_type: the Content-Type reported for the response.
        :param encoding: the encoding to decode the given content.
        :param url: the URL from which the HTML was downloaded.
        :param cache_link_parsing: whether links parsed from this page's url
                                   should be cached. PyPI index urls should
                                   have this set to False, for example.
        """
        self.content = content
        self.content_type = content_type
        self.encoding = encoding
        self.url = url
        self.cache_link_parsing = cache_link_parsing

    def __str__(self) -> str:
        # Never leak credentials embedded in the URL into logs.
        return redact_auth_from_url(self.url)
283
+
284
+
285
class HTMLLinkParser(HTMLParser):
    """
    HTMLParser that records the first <base href> encountered plus the
    attributes of every <a> element, for later conversion into Links.
    """

    def __init__(self, url: str) -> None:
        super().__init__(convert_charrefs=True)
        self.url: str = url
        self.base_url: Optional[str] = None
        self.anchors: List[Dict[str, Optional[str]]] = []

    def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
        if tag == "a":
            self.anchors.append(dict(attrs))
        elif tag == "base" and self.base_url is None:
            # Only the first <base href> wins, matching browser behavior.
            href = self.get_href(attrs)
            if href is not None:
                self.base_url = href

    def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
        """Return the value of the first "href" attribute, or None."""
        return next((value for name, value in attrs if name == "href"), None)
311
+
312
+
313
def _handle_get_simple_fail(
    link: Link,
    reason: Union[str, Exception],
    meth: Optional[Callable[..., None]] = None,
) -> None:
    """Log that *link* could not be fetched, using *meth* (default: logger.debug)."""
    log = logger.debug if meth is None else meth
    log("Could not fetch URL %s: %s - skipping", link, reason)
321
+
322
+
323
def _make_index_content(
    response: Response, cache_link_parsing: bool = True
) -> IndexContent:
    """Build an IndexContent from an HTTP response, carrying over the
    response's Content-Type and any charset declared in its headers."""
    return IndexContent(
        response.content,
        response.headers["Content-Type"],
        encoding=_get_encoding_from_headers(response.headers),
        url=response.url,
        cache_link_parsing=cache_link_parsing,
    )
334
+
335
+
336
def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]:
    """Fetch the Simple API content behind *link* and wrap it as IndexContent.

    Returns None (after logging a warning/debug message) for VCS links,
    archive-looking URLs, unsupported content types, and network failures.
    """
    # Drop any URL fragment before fetching.
    url = link.url.split("#", 1)[0]

    # Check for VCS schemes that do not support lookup as web pages.
    vcs_scheme = _match_vcs_scheme(url)
    if vcs_scheme:
        logger.warning(
            "Cannot look at %s URL %s because it does not support lookup as web pages.",
            vcs_scheme,
            link,
        )
        return None

    # Tack index.html onto file:// URLs that point to directories
    scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
    if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
        # add trailing slash if not present so urljoin doesn't trim
        # final segment
        if not url.endswith("/"):
            url += "/"
        # TODO: In the future, it would be nice if pip supported PEP 691
        #       style responses in the file:// URLs, however there's no
        #       standard file extension for application/vnd.pypi.simple.v1+json
        #       so we'll need to come up with something on our own.
        url = urllib.parse.urljoin(url, "index.html")
        logger.debug(" file: URL is directory, getting %s", url)

    # Every handler below logs the failure and falls through to the final
    # `return None`; only a successful fetch reaches the `else` branch.
    try:
        resp = _get_simple_response(url, session=session)
    except _NotHTTP:
        logger.warning(
            "Skipping page %s because it looks like an archive, and cannot "
            "be checked by a HTTP HEAD request.",
            link,
        )
    except _NotAPIContent as exc:
        logger.warning(
            "Skipping page %s because the %s request got Content-Type: %s. "
            "The only supported Content-Types are application/vnd.pypi.simple.v1+json, "
            "application/vnd.pypi.simple.v1+html, and text/html",
            link,
            exc.request_desc,
            exc.content_type,
        )
    except NetworkConnectionError as exc:
        _handle_get_simple_fail(link, exc)
    except RetryError as exc:
        _handle_get_simple_fail(link, exc)
    except SSLError as exc:
        reason = "There was a problem confirming the ssl certificate: "
        reason += str(exc)
        _handle_get_simple_fail(link, reason, meth=logger.info)
    except requests.ConnectionError as exc:
        _handle_get_simple_fail(link, f"connection error: {exc}")
    except requests.Timeout:
        _handle_get_simple_fail(link, "timed out")
    else:
        return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing)
    return None
395
+
396
+
397
class CollectedSources(NamedTuple):
    """Link sources gathered by LinkCollector.collect_sources(), grouped by
    whether they came from --find-links locations or from index URLs."""

    # Sources derived from --find-links locations (entries may be None).
    find_links: Sequence[Optional[LinkSource]]
    # Sources derived from the configured index URLs (entries may be None).
    index_urls: Sequence[Optional[LinkSource]]
400
+
401
+
402
class LinkCollector:

    """
    Responsible for collecting Link objects from all configured locations,
    making network requests as needed.

    The class's main method is its collect_sources() method.
    """

    def __init__(
        self,
        session: PipSession,
        search_scope: SearchScope,
    ) -> None:
        # search_scope: where to look (index URLs and --find-links).
        # session: the PipSession used for all network requests.
        self.search_scope = search_scope
        self.session = session

    @classmethod
    def create(
        cls,
        session: PipSession,
        options: Values,
        suppress_no_index: bool = False,
    ) -> "LinkCollector":
        """
        Build a LinkCollector from parsed command-line options.

        :param session: The Session to use to make requests.
        :param suppress_no_index: Whether to ignore the --no-index option
            when constructing the SearchScope object.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index and not suppress_no_index:
            logger.debug(
                "Ignoring indexes: %s",
                ",".join(redact_auth_from_url(url) for url in index_urls),
            )
            index_urls = []

        # Make sure find_links is a list before passing to create().
        find_links = options.find_links or []

        search_scope = SearchScope.create(
            find_links=find_links,
            index_urls=index_urls,
            no_index=options.no_index,
        )
        link_collector = LinkCollector(
            session=session,
            search_scope=search_scope,
        )
        return link_collector

    @property
    def find_links(self) -> List[str]:
        # Convenience accessor for the --find-links locations.
        return self.search_scope.find_links

    def fetch_response(self, location: Link) -> Optional[IndexContent]:
        """
        Fetch an HTML page containing package links.

        Returns None if the page could not be fetched (failures are logged
        by _get_index_content).
        """
        return _get_index_content(location, session=self.session)

    def collect_sources(
        self,
        project_name: str,
        candidates_from_page: CandidatesFromPage,
    ) -> CollectedSources:
        # The OrderedDict calls deduplicate sources by URL.
        # Index pages are never cached (cache_link_parsing=False) because
        # their contents can change between resolver passes.
        index_url_sources = collections.OrderedDict(
            build_source(
                loc,
                candidates_from_page=candidates_from_page,
                page_validator=self.session.is_secure_origin,
                expand_dir=False,
                cache_link_parsing=False,
                project_name=project_name,
            )
            for loc in self.search_scope.get_index_urls_locations(project_name)
        ).values()
        find_links_sources = collections.OrderedDict(
            build_source(
                loc,
                candidates_from_page=candidates_from_page,
                page_validator=self.session.is_secure_origin,
                expand_dir=True,
                cache_link_parsing=True,
                project_name=project_name,
            )
            for loc in self.find_links
        ).values()

        if logger.isEnabledFor(logging.DEBUG):
            lines = [
                f"* {s.link}"
                for s in itertools.chain(find_links_sources, index_url_sources)
                if s is not None and s.link is not None
            ]
            lines = [
                f"{len(lines)} location(s) to search "
                f"for versions of {project_name}:"
            ] + lines
            logger.debug("\n".join(lines))

        return CollectedSources(
            find_links=list(find_links_sources),
            index_urls=list(index_url_sources),
        )
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/package_finder.py ADDED
@@ -0,0 +1,1027 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Routines related to PyPI, indexes"""
2
+
3
+ import enum
4
+ import functools
5
+ import itertools
6
+ import logging
7
+ import re
8
+ from typing import TYPE_CHECKING, FrozenSet, Iterable, List, Optional, Set, Tuple, Union
9
+
10
+ from pip._vendor.packaging import specifiers
11
+ from pip._vendor.packaging.tags import Tag
12
+ from pip._vendor.packaging.utils import canonicalize_name
13
+ from pip._vendor.packaging.version import _BaseVersion
14
+ from pip._vendor.packaging.version import parse as parse_version
15
+
16
+ from pip._internal.exceptions import (
17
+ BestVersionAlreadyInstalled,
18
+ DistributionNotFound,
19
+ InvalidWheelFilename,
20
+ UnsupportedWheel,
21
+ )
22
+ from pip._internal.index.collector import LinkCollector, parse_links
23
+ from pip._internal.models.candidate import InstallationCandidate
24
+ from pip._internal.models.format_control import FormatControl
25
+ from pip._internal.models.link import Link
26
+ from pip._internal.models.search_scope import SearchScope
27
+ from pip._internal.models.selection_prefs import SelectionPreferences
28
+ from pip._internal.models.target_python import TargetPython
29
+ from pip._internal.models.wheel import Wheel
30
+ from pip._internal.req import InstallRequirement
31
+ from pip._internal.utils._log import getLogger
32
+ from pip._internal.utils.filetypes import WHEEL_EXTENSION
33
+ from pip._internal.utils.hashes import Hashes
34
+ from pip._internal.utils.logging import indent_log
35
+ from pip._internal.utils.misc import build_netloc
36
+ from pip._internal.utils.packaging import check_requires_python
37
+ from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
38
+
39
+ if TYPE_CHECKING:
40
+ from pip._vendor.typing_extensions import TypeGuard
41
+
42
# Public API of this module.
__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]


logger = getLogger(__name__)

# A wheel build tag parsed from the filename: either empty, or a
# (numeric prefix, remaining string) pair.
BuildTag = Union[Tuple[()], Tuple[int, str]]
# The tuple shape produced by CandidateEvaluator._sort_key.
CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]
49
+
50
+
51
def _check_link_requires_python(
    link: Link,
    version_info: Tuple[int, int, int],
    ignore_requires_python: bool = False,
) -> bool:
    """
    Return whether the given Python version is compatible with a link's
    "Requires-Python" value.

    :param version_info: A 3-tuple of ints representing the Python
        major-minor-micro version to check.
    :param ignore_requires_python: Whether to ignore the "Requires-Python"
        value if the given Python version isn't compatible.
    """
    try:
        is_compatible = check_requires_python(
            link.requires_python,
            version_info=version_info,
        )
    except specifiers.InvalidSpecifier:
        # A malformed specifier is treated as "no constraint".
        logger.debug(
            "Ignoring invalid Requires-Python (%r) for link: %s",
            link.requires_python,
            link,
        )
        return True

    if is_compatible:
        return True

    version = ".".join(map(str, version_info))
    if not ignore_requires_python:
        logger.verbose(
            "Link requires a different Python (%s not in: %r): %s",
            version,
            link.requires_python,
            link,
        )
        return False

    # Incompatible, but the caller asked us to ignore Requires-Python.
    logger.debug(
        "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",
        version,
        link.requires_python,
        link,
    )
    return True
96
+
97
+
98
class LinkType(enum.Enum):
    """Outcome of LinkEvaluator.evaluate_link(): either a viable
    installation candidate, or the reason the link was rejected."""

    candidate = enum.auto()
    different_project = enum.auto()
    yanked = enum.auto()
    format_unsupported = enum.auto()
    format_invalid = enum.auto()
    platform_mismatch = enum.auto()
    requires_python_mismatch = enum.auto()
106
+
107
+
108
class LinkEvaluator:

    """
    Responsible for evaluating links for a particular project.
    """

    # Matches a trailing "-pyX[.Y]" version marker in an egg fragment.
    _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    def __init__(
        self,
        project_name: str,
        canonical_name: str,
        formats: FrozenSet[str],
        target_python: TargetPython,
        allow_yanked: bool,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """
        :param project_name: The user supplied package name.
        :param canonical_name: The canonical package name.
        :param formats: The formats allowed for this package. Should be a set
            with 'binary' or 'source' or both in it.
        :param target_python: The target Python interpreter to use when
            evaluating link compatibility. This is used, for example, to
            check wheel compatibility, as well as when checking the Python
            version, e.g. the Python version embedded in a link filename
            (or egg fragment) and against an HTML link's optional PEP 503
            "data-requires-python" attribute.
        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param ignore_requires_python: Whether to ignore incompatible
            PEP 503 "data-requires-python" values in HTML links. Defaults
            to False.
        """
        if ignore_requires_python is None:
            ignore_requires_python = False

        self._allow_yanked = allow_yanked
        self._canonical_name = canonical_name
        self._ignore_requires_python = ignore_requires_python
        self._formats = formats
        self._target_python = target_python

        self.project_name = project_name

    def evaluate_link(self, link: Link) -> Tuple[LinkType, str]:
        """
        Determine whether a link is a candidate for installation.

        :return: A tuple (result, detail), where *result* is an enum
            representing whether the evaluation found a candidate, or the reason
            why one is not found. If a candidate is found, *detail* will be the
            candidate's version string; if one is not found, it contains the
            reason the link fails to qualify.
        """
        version = None
        # Yanked links (PEP 592) are excluded up front unless explicitly allowed.
        if link.is_yanked and not self._allow_yanked:
            reason = link.yanked_reason or "<none given>"
            return (LinkType.yanked, f"yanked for reason: {reason}")

        if link.egg_fragment:
            egg_info = link.egg_fragment
            ext = link.ext
        else:
            egg_info, ext = link.splitext()
            if not ext:
                return (LinkType.format_unsupported, "not a file")
            if ext not in SUPPORTED_EXTENSIONS:
                return (
                    LinkType.format_unsupported,
                    f"unsupported archive format: {ext}",
                )
            if "binary" not in self._formats and ext == WHEEL_EXTENSION:
                reason = f"No binaries permitted for {self.project_name}"
                return (LinkType.format_unsupported, reason)
            if "macosx10" in link.path and ext == ".zip":
                return (LinkType.format_unsupported, "macosx10 one")
            if ext == WHEEL_EXTENSION:
                try:
                    wheel = Wheel(link.filename)
                except InvalidWheelFilename:
                    return (
                        LinkType.format_invalid,
                        "invalid wheel filename",
                    )
                if canonicalize_name(wheel.name) != self._canonical_name:
                    reason = f"wrong project name (not {self.project_name})"
                    return (LinkType.different_project, reason)

                supported_tags = self._target_python.get_unsorted_tags()
                if not wheel.supported(supported_tags):
                    # Include the wheel's tags in the reason string to
                    # simplify troubleshooting compatibility issues.
                    file_tags = ", ".join(wheel.get_formatted_file_tags())
                    reason = (
                        f"none of the wheel's tags ({file_tags}) are compatible "
                        f"(run pip debug --verbose to show compatible tags)"
                    )
                    return (LinkType.platform_mismatch, reason)

                version = wheel.version

        # This should be up by the self.ok_binary check, but see issue 2700.
        if "source" not in self._formats and ext != WHEEL_EXTENSION:
            reason = f"No sources permitted for {self.project_name}"
            return (LinkType.format_unsupported, reason)

        # For non-wheels, the version comes from the egg fragment / filename.
        if not version:
            version = _extract_version_from_fragment(
                egg_info,
                self._canonical_name,
            )
        if not version:
            reason = f"Missing project version for {self.project_name}"
            return (LinkType.format_invalid, reason)

        # Strip a trailing "-pyX.Y" marker and check it against the target.
        match = self._py_version_re.search(version)
        if match:
            version = version[: match.start()]
            py_version = match.group(1)
            if py_version != self._target_python.py_version:
                return (
                    LinkType.platform_mismatch,
                    "Python version is incorrect",
                )

        supports_python = _check_link_requires_python(
            link,
            version_info=self._target_python.py_version_info,
            ignore_requires_python=self._ignore_requires_python,
        )
        if not supports_python:
            reason = f"{version} Requires-Python {link.requires_python}"
            return (LinkType.requires_python_mismatch, reason)

        logger.debug("Found link %s, version: %s", link, version)

        return (LinkType.candidate, version)
250
+
251
+
252
def filter_unallowed_hashes(
    candidates: List[InstallationCandidate],
    hashes: Optional[Hashes],
    project_name: str,
) -> List[InstallationCandidate]:
    """
    Return a new candidate list with hash-mismatching candidates removed.

    If at least one candidate has an allowed hash, the result keeps every
    candidate that either matches an allowed hash or declares no hash at
    all. If no candidate matches, the original candidates are all kept.

    Keeping no-hash candidates when a match exists lets a warning be
    logged if a more-preferred candidate lacks a hash. Keeping everything
    when nothing matches lets pip report the hash of the candidate that
    would otherwise have been installed (so the user can update their
    requirements file with the desired hash).
    """
    if not hashes:
        logger.debug(
            "Given no hashes to check %s links for project %r: "
            "discarding no candidates",
            len(candidates),
            project_name,
        )
        # Always hand back a copy, never the caller's list.
        return list(candidates)

    matches_or_no_digest = []
    # Collected only so the log message below can list them.
    non_matches = []
    match_count = 0
    for candidate in candidates:
        link = candidate.link
        if not link.has_hash:
            matches_or_no_digest.append(candidate)
        elif link.is_hash_allowed(hashes=hashes):
            match_count += 1
            matches_or_no_digest.append(candidate)
        else:
            non_matches.append(candidate)

    if match_count:
        filtered = matches_or_no_digest
    else:
        # Nothing matched: keep everything (again, as a copy).
        filtered = list(candidates)

    if len(filtered) == len(candidates):
        discard_message = "discarding no candidates"
    else:
        discard_message = "discarding {} non-matches:\n {}".format(
            len(non_matches),
            "\n ".join(str(candidate.link) for candidate in non_matches),
        )

    logger.debug(
        "Checked %s links for project %r against %s hashes "
        "(%s matches, %s no digest): %s",
        len(candidates),
        project_name,
        hashes.digest_count,
        match_count,
        len(matches_or_no_digest) - match_count,
        discard_message,
    )

    return filtered
324
+
325
+
326
class CandidatePreferences:

    """
    Bundle of preferences used when filtering and sorting
    InstallationCandidate objects.
    """

    def __init__(
        self,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
    ) -> None:
        """
        :param prefer_binary: Whether to sort wheels above source archives.
        :param allow_all_prereleases: Whether to allow all pre-releases.
        """
        self.prefer_binary = prefer_binary
        self.allow_all_prereleases = allow_all_prereleases
343
+
344
+
345
class BestCandidateResult:
    """A collection of candidates, returned by `PackageFinder.find_best_candidate`.

    Only CandidateEvaluator's `compute_best_candidate()` method is meant to
    instantiate this class.
    """

    def __init__(
        self,
        candidates: List[InstallationCandidate],
        applicable_candidates: List[InstallationCandidate],
        best_candidate: Optional[InstallationCandidate],
    ) -> None:
        """
        :param candidates: A sequence of all available candidates found.
        :param applicable_candidates: The applicable candidates.
        :param best_candidate: The most preferred candidate found, or None
            if no applicable candidates were found.
        """
        # Sanity-check the invariants the evaluator guarantees.
        assert set(applicable_candidates) <= set(candidates)
        if best_candidate is None:
            assert not applicable_candidates
        else:
            assert best_candidate in applicable_candidates

        self.best_candidate = best_candidate
        self._candidates = candidates
        self._applicable_candidates = applicable_candidates

    def iter_all(self) -> Iterable[InstallationCandidate]:
        """Iterate through all candidates."""
        yield from self._candidates

    def iter_applicable(self) -> Iterable[InstallationCandidate]:
        """Iterate through the applicable candidates."""
        yield from self._applicable_candidates
383
+
384
+
385
class CandidateEvaluator:

    """
    Responsible for filtering and sorting candidates for installation based
    on what tags are valid.
    """

    @classmethod
    def create(
        cls,
        project_name: str,
        target_python: Optional[TargetPython] = None,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> "CandidateEvaluator":
        """Create a CandidateEvaluator object.

        :param target_python: The target Python interpreter to use when
            checking compatibility. If None (the default), a TargetPython
            object will be constructed from the running Python.
        :param specifier: An optional object implementing `filter`
            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
            versions.
        :param hashes: An optional collection of allowed hashes.
        """
        if target_python is None:
            target_python = TargetPython()
        if specifier is None:
            # An empty SpecifierSet accepts every version.
            specifier = specifiers.SpecifierSet()

        supported_tags = target_python.get_sorted_tags()

        return cls(
            project_name=project_name,
            supported_tags=supported_tags,
            specifier=specifier,
            prefer_binary=prefer_binary,
            allow_all_prereleases=allow_all_prereleases,
            hashes=hashes,
        )

    def __init__(
        self,
        project_name: str,
        supported_tags: List[Tag],
        specifier: specifiers.BaseSpecifier,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
        hashes: Optional[Hashes] = None,
    ) -> None:
        """
        :param supported_tags: The PEP 425 tags supported by the target
            Python in order of preference (most preferred first).
        """
        self._allow_all_prereleases = allow_all_prereleases
        self._hashes = hashes
        self._prefer_binary = prefer_binary
        self._project_name = project_name
        self._specifier = specifier
        self._supported_tags = supported_tags
        # Since the index of the tag in the _supported_tags list is used
        # as a priority, precompute a map from tag to index/priority to be
        # used in wheel.find_most_preferred_tag.
        self._wheel_tag_preferences = {
            tag: idx for idx, tag in enumerate(supported_tags)
        }

    def get_applicable_candidates(
        self,
        candidates: List[InstallationCandidate],
    ) -> List[InstallationCandidate]:
        """
        Return the applicable candidates from a list of candidates.
        """
        # Using None infers from the specifier instead.
        allow_prereleases = self._allow_all_prereleases or None
        specifier = self._specifier
        versions = {
            str(v)
            for v in specifier.filter(
                # We turn the version object into a str here because otherwise
                # when we're debundled but setuptools isn't, Python will see
                # packaging.version.Version and
                # pkg_resources._vendor.packaging.version.Version as different
                # types. This way we'll use a str as a common data interchange
                # format. If we stop using the pkg_resources provided specifier
                # and start using our own, we can drop the cast to str().
                (str(c.version) for c in candidates),
                prereleases=allow_prereleases,
            )
        }

        # Again, converting version to str to deal with debundling.
        applicable_candidates = [c for c in candidates if str(c.version) in versions]

        filtered_applicable_candidates = filter_unallowed_hashes(
            candidates=applicable_candidates,
            hashes=self._hashes,
            project_name=self._project_name,
        )

        return sorted(filtered_applicable_candidates, key=self._sort_key)

    def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey:
        """
        Function to pass as the `key` argument to a call to sorted() to sort
        InstallationCandidates by preference.

        Returns a tuple such that tuples sorting as greater using Python's
        default comparison operator are more preferred.

        The preference is as follows:

        First and foremost, candidates with allowed (matching) hashes are
        always preferred over candidates without matching hashes. This is
        because e.g. if the only candidate with an allowed hash is yanked,
        we still want to use that candidate.

        Second, excepting hash considerations, candidates that have been
        yanked (in the sense of PEP 592) are always less preferred than
        candidates that haven't been yanked. Then:

        If not finding wheels, they are sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self._supported_tags)
          3. source archives
        If prefer_binary was set, then all wheels are sorted above sources.

        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        """
        valid_tags = self._supported_tags
        support_num = len(valid_tags)
        build_tag: BuildTag = ()
        binary_preference = 0
        link = candidate.link
        if link.is_wheel:
            # can raise InvalidWheelFilename
            wheel = Wheel(link.filename)
            try:
                # Negated so that a more-preferred (lower-index) tag sorts higher.
                pri = -(
                    wheel.find_most_preferred_tag(
                        valid_tags, self._wheel_tag_preferences
                    )
                )
            except ValueError:
                raise UnsupportedWheel(
                    f"{wheel.filename} is not a supported wheel for this platform. It "
                    "can't be sorted."
                )
            if self._prefer_binary:
                binary_preference = 1
            if wheel.build_tag is not None:
                match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
                assert match is not None, "guaranteed by filename validation"
                build_tag_groups = match.groups()
                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
        else:  # sdist
            # Sorts below every wheel, which all have pri > -support_num.
            pri = -(support_num)
        has_allowed_hash = int(link.is_hash_allowed(self._hashes))
        yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
        return (
            has_allowed_hash,
            yank_value,
            binary_preference,
            candidate.version,
            pri,
            build_tag,
        )

    def sort_best_candidate(
        self,
        candidates: List[InstallationCandidate],
    ) -> Optional[InstallationCandidate]:
        """
        Return the best candidate per the instance's sort order, or None if
        no candidate is acceptable.
        """
        if not candidates:
            return None
        best_candidate = max(candidates, key=self._sort_key)
        return best_candidate

    def compute_best_candidate(
        self,
        candidates: List[InstallationCandidate],
    ) -> BestCandidateResult:
        """
        Compute and return a `BestCandidateResult` instance.
        """
        applicable_candidates = self.get_applicable_candidates(candidates)

        best_candidate = self.sort_best_candidate(applicable_candidates)

        return BestCandidateResult(
            candidates,
            applicable_candidates=applicable_candidates,
            best_candidate=best_candidate,
        )
588
+
589
+
590
class PackageFinder:
    """This finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.
    """

    def __init__(
        self,
        link_collector: LinkCollector,
        target_python: TargetPython,
        allow_yanked: bool,
        format_control: Optional[FormatControl] = None,
        candidate_prefs: Optional[CandidatePreferences] = None,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """
        This constructor is primarily meant to be used by the create() class
        method and from tests.

        :param link_collector: Collects link sources and fetches index pages.
        :param target_python: The interpreter links are checked against.
        :param allow_yanked: Whether yanked files may become candidates.
        :param format_control: A FormatControl object, used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param candidate_prefs: Options to use when creating a
            CandidateEvaluator object.
        :param ignore_requires_python: Whether to ignore Requires-Python
            metadata when evaluating links.
        """
        if candidate_prefs is None:
            candidate_prefs = CandidatePreferences()

        format_control = format_control or FormatControl(set(), set())

        self._allow_yanked = allow_yanked
        self._candidate_prefs = candidate_prefs
        self._ignore_requires_python = ignore_requires_python
        self._link_collector = link_collector
        self._target_python = target_python

        self.format_control = format_control

        # These are boring links that have already been logged somehow.
        self._logged_links: Set[Tuple[Link, LinkType, str]] = set()

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    @classmethod
    def create(
        cls,
        link_collector: LinkCollector,
        selection_prefs: SelectionPreferences,
        target_python: Optional[TargetPython] = None,
    ) -> "PackageFinder":
        """Create a PackageFinder.

        :param selection_prefs: The candidate selection preferences, as a
            SelectionPreferences object.
        :param target_python: The target Python interpreter to use when
            checking compatibility. If None (the default), a TargetPython
            object will be constructed from the running Python.
        """
        if target_python is None:
            target_python = TargetPython()

        candidate_prefs = CandidatePreferences(
            prefer_binary=selection_prefs.prefer_binary,
            allow_all_prereleases=selection_prefs.allow_all_prereleases,
        )

        return cls(
            candidate_prefs=candidate_prefs,
            link_collector=link_collector,
            target_python=target_python,
            allow_yanked=selection_prefs.allow_yanked,
            format_control=selection_prefs.format_control,
            ignore_requires_python=selection_prefs.ignore_requires_python,
        )

    @property
    def target_python(self) -> TargetPython:
        return self._target_python

    @property
    def search_scope(self) -> SearchScope:
        return self._link_collector.search_scope

    @search_scope.setter
    def search_scope(self, search_scope: SearchScope) -> None:
        self._link_collector.search_scope = search_scope

    @property
    def find_links(self) -> List[str]:
        return self._link_collector.find_links

    @property
    def index_urls(self) -> List[str]:
        return self.search_scope.index_urls

    @property
    def trusted_hosts(self) -> Iterable[str]:
        # (host, port) pairs come from the session's trusted-origin list.
        for host_port in self._link_collector.session.pip_trusted_origins:
            yield build_netloc(*host_port)

    @property
    def allow_all_prereleases(self) -> bool:
        return self._candidate_prefs.allow_all_prereleases

    def set_allow_all_prereleases(self) -> None:
        self._candidate_prefs.allow_all_prereleases = True

    @property
    def prefer_binary(self) -> bool:
        return self._candidate_prefs.prefer_binary

    def set_prefer_binary(self) -> None:
        self._candidate_prefs.prefer_binary = True

    def requires_python_skipped_reasons(self) -> List[str]:
        """Return sorted, de-duplicated details of links that were skipped
        due to a Requires-Python mismatch."""
        reasons = {
            detail
            for _, result, detail in self._logged_links
            if result == LinkType.requires_python_mismatch
        }
        return sorted(reasons)

    def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
        """Build a LinkEvaluator configured for *project_name*."""
        canonical_name = canonicalize_name(project_name)
        formats = self.format_control.get_allowed_formats(canonical_name)

        return LinkEvaluator(
            project_name=project_name,
            canonical_name=canonical_name,
            formats=formats,
            target_python=self._target_python,
            allow_yanked=self._allow_yanked,
            ignore_requires_python=self._ignore_requires_python,
        )

    def _sort_links(self, links: Iterable[Link]) -> List[Link]:
        """
        Returns elements of links in order, non-egg links first, egg links
        second, while eliminating duplicates
        """
        eggs, no_eggs = [], []
        seen: Set[Link] = set()
        for link in links:
            if link not in seen:
                seen.add(link)
                if link.egg_fragment:
                    eggs.append(link)
                else:
                    no_eggs.append(link)
        return no_eggs + eggs

    def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None:
        # De-duplicate: each (link, result, detail) triple is logged once.
        entry = (link, result, detail)
        if entry not in self._logged_links:
            # Put the link at the end so the reason is more visible and because
            # the link string is usually very long.
            logger.debug("Skipping link: %s: %s", detail, link)
            self._logged_links.add(entry)

    def get_install_candidate(
        self, link_evaluator: LinkEvaluator, link: Link
    ) -> Optional[InstallationCandidate]:
        """
        If the link is a candidate for install, convert it to an
        InstallationCandidate and return it. Otherwise, return None.
        """
        result, detail = link_evaluator.evaluate_link(link)
        if result != LinkType.candidate:
            self._log_skipped_link(link, result, detail)
            return None

        # For a candidate, `detail` carries the version string.
        return InstallationCandidate(
            name=link_evaluator.project_name,
            link=link,
            version=detail,
        )

    def evaluate_links(
        self, link_evaluator: LinkEvaluator, links: Iterable[Link]
    ) -> List[InstallationCandidate]:
        """
        Convert links that are candidates to InstallationCandidate objects.
        """
        candidates = []
        for link in self._sort_links(links):
            candidate = self.get_install_candidate(link_evaluator, link)
            if candidate is not None:
                candidates.append(candidate)

        return candidates

    def process_project_url(
        self, project_url: Link, link_evaluator: LinkEvaluator
    ) -> List[InstallationCandidate]:
        """Fetch one project index page and evaluate the links it lists."""
        logger.debug(
            "Fetching project page and analyzing links: %s",
            project_url,
        )
        index_response = self._link_collector.fetch_response(project_url)
        if index_response is None:
            # Fetch failed or was rejected; contributes no candidates.
            return []

        page_links = list(parse_links(index_response))

        with indent_log():
            package_links = self.evaluate_links(
                link_evaluator,
                links=page_links,
            )

        return package_links

    # NOTE(review): lru_cache on an instance method keys on `self` and keeps
    # the finder alive for the cache's lifetime (ruff B019). Acceptable if a
    # single long-lived finder is used per run — confirm for other usages.
    @functools.lru_cache(maxsize=None)
    def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
        """Find all available InstallationCandidate for project_name

        This checks index_urls and find_links.
        All versions found are returned as an InstallationCandidate list.

        See LinkEvaluator.evaluate_link() for details on which files
        are accepted.
        """
        link_evaluator = self.make_link_evaluator(project_name)

        collected_sources = self._link_collector.collect_sources(
            project_name=project_name,
            candidates_from_page=functools.partial(
                self.process_project_url,
                link_evaluator=link_evaluator,
            ),
        )

        page_candidates_it = itertools.chain.from_iterable(
            source.page_candidates()
            for sources in collected_sources
            for source in sources
            if source is not None
        )
        page_candidates = list(page_candidates_it)

        file_links_it = itertools.chain.from_iterable(
            source.file_links()
            for sources in collected_sources
            for source in sources
            if source is not None
        )
        # Sorting (reversed) puts later (presumably newer) filenames first.
        file_candidates = self.evaluate_links(
            link_evaluator,
            sorted(file_links_it, reverse=True),
        )

        if logger.isEnabledFor(logging.DEBUG) and file_candidates:
            paths = []
            for candidate in file_candidates:
                assert candidate.link.url  # we need to have a URL
                try:
                    paths.append(candidate.link.file_path)
                except Exception:
                    paths.append(candidate.link.url)  # it's not a local file

            logger.debug("Local files found: %s", ", ".join(paths))

        # This is an intentional priority ordering
        return file_candidates + page_candidates

    def make_candidate_evaluator(
        self,
        project_name: str,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> CandidateEvaluator:
        """Create a CandidateEvaluator object to use."""
        candidate_prefs = self._candidate_prefs
        return CandidateEvaluator.create(
            project_name=project_name,
            target_python=self._target_python,
            prefer_binary=candidate_prefs.prefer_binary,
            allow_all_prereleases=candidate_prefs.allow_all_prereleases,
            specifier=specifier,
            hashes=hashes,
        )

    # NOTE(review): also cached on `self` — see find_all_candidates above.
    @functools.lru_cache(maxsize=None)
    def find_best_candidate(
        self,
        project_name: str,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> BestCandidateResult:
        """Find matches for the given project and specifier.

        :param specifier: An optional object implementing `filter`
            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
            versions.

        :return: A `BestCandidateResult` instance.
        """
        candidates = self.find_all_candidates(project_name)
        candidate_evaluator = self.make_candidate_evaluator(
            project_name=project_name,
            specifier=specifier,
            hashes=hashes,
        )
        return candidate_evaluator.compute_best_candidate(candidates)

    def find_requirement(
        self, req: InstallRequirement, upgrade: bool
    ) -> Optional[InstallationCandidate]:
        """Try to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a InstallationCandidate if found,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        """
        hashes = req.hashes(trust_internet=False)
        best_candidate_result = self.find_best_candidate(
            req.name,
            specifier=req.specifier,
            hashes=hashes,
        )
        best_candidate = best_candidate_result.best_candidate

        installed_version: Optional[_BaseVersion] = None
        if req.satisfied_by is not None:
            installed_version = req.satisfied_by.version

        def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
            # This repeated parse_version and str() conversion is needed to
            # handle different vendoring sources from pip and pkg_resources.
            # If we stop using the pkg_resources provided specifier and start
            # using our own, we can drop the cast to str().
            return (
                ", ".join(
                    sorted(
                        {str(c.version) for c in cand_iter},
                        key=parse_version,
                    )
                )
                or "none"
            )

        if installed_version is None and best_candidate is None:
            # Nothing installed and nothing found: hard failure.
            logger.critical(
                "Could not find a version that satisfies the requirement %s "
                "(from versions: %s)",
                req,
                _format_versions(best_candidate_result.iter_all()),
            )

            raise DistributionNotFound(f"No matching distribution found for {req}")

        def _should_install_candidate(
            candidate: Optional[InstallationCandidate],
        ) -> "TypeGuard[InstallationCandidate]":
            # True when the found candidate is strictly newer than what is
            # installed (or nothing is installed at all).
            if installed_version is None:
                return True
            if best_candidate is None:
                return False
            return best_candidate.version > installed_version

        if not upgrade and installed_version is not None:
            # Not upgrading: keep the installed version either way; only the
            # log message differs.
            if _should_install_candidate(best_candidate):
                logger.debug(
                    "Existing installed version (%s) satisfies requirement "
                    "(most up-to-date version is %s)",
                    installed_version,
                    best_candidate.version,
                )
            else:
                logger.debug(
                    "Existing installed version (%s) is most up-to-date and "
                    "satisfies requirement",
                    installed_version,
                )
            return None

        if _should_install_candidate(best_candidate):
            logger.debug(
                "Using version %s (newest of versions: %s)",
                best_candidate.version,
                _format_versions(best_candidate_result.iter_applicable()),
            )
            return best_candidate

        # We have an existing version, and it's the best version
        logger.debug(
            "Installed version (%s) is most up-to-date (past versions: %s)",
            installed_version,
            _format_versions(best_candidate_result.iter_applicable()),
        )
        raise BestVersionAlreadyInstalled
984
+
985
+
986
def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
    """Find the separator's index based on the package's canonical name.

    :param fragment: A <package>+<version> filename "fragment" (stem) or
        egg fragment.
    :param canonical_name: The package's canonical name.

    This function is needed since the canonicalized name does not necessarily
    have the same length as the egg info's name part. An example::

        >>> fragment = 'foo__bar-1.0'
        >>> canonical_name = 'foo-bar'
        >>> _find_name_version_sep(fragment, canonical_name)
        8

    :raises ValueError: if no dash splits *fragment* into *canonical_name*
        followed by a version.
    """
    # The separator is a single dash whose prefix, once canonicalized,
    # equals the canonical project name.
    for index, char in enumerate(fragment):
        if char == "-" and canonicalize_name(fragment[:index]) == canonical_name:
            return index
    raise ValueError(f"{fragment} does not match {canonical_name}")
1010
+
1011
+
1012
def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]:
    """Parse the version string from a <package>+<version> filename
    "fragment" (stem) or egg fragment.

    :param fragment: The string to parse. E.g. foo-2.1
    :param canonical_name: The canonicalized name of the package this
        belongs to.

    Returns None when no name/version separator is found or the version
    part is empty.
    """
    try:
        sep = _find_name_version_sep(fragment, canonical_name)
    except ValueError:
        return None
    # Everything after the separator dash is the version; an empty tail
    # (fragment ended in a dash) carries no version.
    return fragment[sep + 1 :] or None
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/index/sources.py ADDED
@@ -0,0 +1,285 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import mimetypes
3
+ import os
4
+ from collections import defaultdict
5
+ from typing import Callable, Dict, Iterable, List, Optional, Tuple
6
+
7
+ from pip._vendor.packaging.utils import (
8
+ InvalidSdistFilename,
9
+ InvalidVersion,
10
+ InvalidWheelFilename,
11
+ canonicalize_name,
12
+ parse_sdist_filename,
13
+ parse_wheel_filename,
14
+ )
15
+
16
+ from pip._internal.models.candidate import InstallationCandidate
17
+ from pip._internal.models.link import Link
18
+ from pip._internal.utils.urls import path_to_url, url_to_path
19
+ from pip._internal.vcs import is_url
20
+
21
logger = logging.getLogger(__name__)

# Type aliases for the link-source callback protocol used below.
FoundCandidates = Iterable[InstallationCandidate]  # candidates parsed from pages
FoundLinks = Iterable[Link]  # direct archive links
# Callback turning an index-page Link into installation candidates.
CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
# Predicate deciding whether a page Link may be fetched/parsed.
PageValidator = Callable[[Link], bool]
27
+
28
+
29
class LinkSource:
    """Abstract interface: one place installation links can come from.

    Concrete subclasses must implement all three members.
    """

    @property
    def link(self) -> Optional[Link]:
        """Returns the underlying link, if there's one."""
        raise NotImplementedError()

    def page_candidates(self) -> FoundCandidates:
        """Candidates found by parsing an archive listing HTML file."""
        raise NotImplementedError()

    def file_links(self) -> FoundLinks:
        """Links found by specifying archives directly."""
        raise NotImplementedError()
42
+
43
+
44
+ def _is_html_file(file_url: str) -> bool:
45
+ return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"
46
+
47
+
48
class _FlatDirectoryToUrls:
    """Scans directory and caches results"""

    def __init__(self, path: str) -> None:
        self._path = path
        # URLs of HTML files found in the directory (index-style pages).
        self._page_candidates: List[str] = []
        # Canonical project name -> archive URLs for that project.
        self._project_name_to_urls: Dict[str, List[str]] = defaultdict(list)
        # Lazily flipped to True after the first scan; both properties
        # below trigger the scan on first access.
        self._scanned_directory = False

    def _scan_directory(self) -> None:
        """Scans directory once and populates both page_candidates
        and project_name_to_urls at the same time
        """
        for entry in os.scandir(self._path):
            url = path_to_url(entry.path)
            if _is_html_file(url):
                self._page_candidates.append(url)
                continue

            # File must have a valid wheel or sdist name,
            # otherwise not worth considering as a package
            try:
                project_filename = parse_wheel_filename(entry.name)[0]
            except (InvalidWheelFilename, InvalidVersion):
                try:
                    project_filename = parse_sdist_filename(entry.name)[0]
                except (InvalidSdistFilename, InvalidVersion):
                    continue

            self._project_name_to_urls[project_filename].append(url)
        self._scanned_directory = True

    @property
    def page_candidates(self) -> List[str]:
        if not self._scanned_directory:
            self._scan_directory()

        return self._page_candidates

    @property
    def project_name_to_urls(self) -> Dict[str, List[str]]:
        if not self._scanned_directory:
            self._scan_directory()

        return self._project_name_to_urls
93
+
94
+
95
class _FlatDirectorySource(LinkSource):
    """Link source specified by ``--find-links=<path-to-dir>``.

    This looks the content of the directory, and returns:

    * ``page_candidates``: Links listed on each HTML file in the directory.
    * ``file_candidates``: Archives in the directory.
    """

    # Class-level cache: directory scans are shared across instances (and
    # thus across projects), keyed by path. NOTE(review): entries live for
    # the duration of the process — confirm acceptable for long-running use.
    _paths_to_urls: Dict[str, _FlatDirectoryToUrls] = {}

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        path: str,
        project_name: str,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._project_name = canonicalize_name(project_name)

        # Get existing instance of _FlatDirectoryToUrls if it exists
        if path in self._paths_to_urls:
            self._path_to_urls = self._paths_to_urls[path]
        else:
            self._path_to_urls = _FlatDirectoryToUrls(path=path)
            self._paths_to_urls[path] = self._path_to_urls

    @property
    def link(self) -> Optional[Link]:
        # A directory has no single underlying link.
        return None

    def page_candidates(self) -> FoundCandidates:
        for url in self._path_to_urls.page_candidates:
            yield from self._candidates_from_page(Link(url))

    def file_links(self) -> FoundLinks:
        # Only archives whose filename matched this project are yielded.
        for url in self._path_to_urls.project_name_to_urls[self._project_name]:
            yield Link(url)
133
+
134
+
135
class _LocalFileSource(LinkSource):
    """``--find-links=<path-or-url>`` or ``--[extra-]index-url=<path-or-url>``.

    If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
    the option, it is converted to a URL first. This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Only an HTML file is treated as an index page worth parsing.
        if _is_html_file(self._link.url):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # A non-HTML file is treated as the archive itself.
        if not _is_html_file(self._link.url):
            yield self._link
166
+
167
+
168
class _RemoteFileSource(LinkSource):
    """``--find-links=<url>`` or ``--[extra-]index-url=<url>``.

    This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        page_validator: PageValidator,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._page_validator = page_validator
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Parse the page only when the validator accepts this link.
        if self._page_validator(self._link):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        yield self._link
198
+
199
+
200
class _IndexDirectorySource(LinkSource):
    """``--[extra-]index-url=<path-to-directory>``.

    This is treated like a remote URL; ``candidates_from_page`` contains logic
    for this by appending ``index.html`` to the link.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # An index directory never contributes direct archive links.
        return ()
224
+
225
+
226
def build_source(
    location: str,
    *,
    candidates_from_page: CandidatesFromPage,
    page_validator: PageValidator,
    expand_dir: bool,
    cache_link_parsing: bool,
    project_name: str,
) -> Tuple[Optional[str], Optional[LinkSource]]:
    """Classify *location* (local path, ``file:`` URL, or remote URL) and
    build the matching LinkSource.

    :param expand_dir: When True, a local directory is scanned for archives
        (``--find-links`` behavior); otherwise it is treated as an index
        directory.
    :param cache_link_parsing: Forwarded onto each created Link.
    :returns: ``(url, source)``; either element is None when the location is
        ignored, in which case a warning has been logged.
    """
    path: Optional[str] = None
    url: Optional[str] = None
    if os.path.exists(location):  # Is a local path.
        url = path_to_url(location)
        path = location
    elif location.startswith("file:"):  # A file: URL.
        url = location
        path = url_to_path(location)
    elif is_url(location):
        url = location

    if url is None:
        msg = (
            "Location '%s' is ignored: "
            "it is either a non-existing path or lacks a specific scheme."
        )
        logger.warning(msg, location)
        return (None, None)

    if path is None:
        # No local path resolved: must be a remote URL.
        source: LinkSource = _RemoteFileSource(
            candidates_from_page=candidates_from_page,
            page_validator=page_validator,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, source)

    if os.path.isdir(path):
        if expand_dir:
            source = _FlatDirectorySource(
                candidates_from_page=candidates_from_page,
                path=path,
                project_name=project_name,
            )
        else:
            source = _IndexDirectorySource(
                candidates_from_page=candidates_from_page,
                link=Link(url, cache_link_parsing=cache_link_parsing),
            )
        return (url, source)
    elif os.path.isfile(path):
        source = _LocalFileSource(
            candidates_from_page=candidates_from_page,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, source)
    # Path exists but is neither file nor directory (e.g. socket/device).
    logger.warning(
        "Location '%s' is ignored: it is neither a file nor a directory.",
        location,
    )
    return (url, None)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-38.pyc ADDED
Binary file (2.24 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-38.pyc ADDED
Binary file (10.6 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ """A package that contains models that represent entities.
2
+ """
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/candidate.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pip._vendor.packaging.version import parse as parse_version
2
+
3
+ from pip._internal.models.link import Link
4
+ from pip._internal.utils.models import KeyBasedCompareMixin
5
+
6
+
7
+ class InstallationCandidate(KeyBasedCompareMixin):
8
+ """Represents a potential "candidate" for installation."""
9
+
10
+ __slots__ = ["name", "version", "link"]
11
+
12
+ def __init__(self, name: str, version: str, link: Link) -> None:
13
+ self.name = name
14
+ self.version = parse_version(version)
15
+ self.link = link
16
+
17
+ super().__init__(
18
+ key=(self.name, self.version, self.link),
19
+ defining_class=InstallationCandidate,
20
+ )
21
+
22
+ def __repr__(self) -> str:
23
+ return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
24
+ self.name,
25
+ self.version,
26
+ self.link,
27
+ )
28
+
29
+ def __str__(self) -> str:
30
+ return f"{self.name!r} candidate (version {self.version} at {self.link})"
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/direct_url.py ADDED
@@ -0,0 +1,235 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ PEP 610 """
2
+ import json
3
+ import re
4
+ import urllib.parse
5
+ from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union
6
+
7
__all__ = [
    "DirectUrl",
    "DirectUrlValidationError",
    "DirInfo",
    "ArchiveInfo",
    "VcsInfo",
]

T = TypeVar("T")

# Filename under which PEP 610 metadata is stored in a dist-info directory.
DIRECT_URL_METADATA_NAME = "direct_url.json"
# Matches "${VAR}" or "${USER}:${PASS}" env-var placeholders left in URLs.
ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")
19
+
20
+
21
class DirectUrlValidationError(Exception):
    """Raised when direct_url.json data fails structural validation."""
23
+
24
+
25
def _get(
    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
) -> Optional[T]:
    """Get value from dictionary and verify expected type.

    Returns *default* when *key* is absent; raises
    DirectUrlValidationError when the stored value has the wrong type.
    """
    try:
        value = d[key]
    except KeyError:
        return default
    if not isinstance(value, expected_type):
        raise DirectUrlValidationError(
            f"{value!r} has unexpected type for {key} (expected {expected_type})"
        )
    return value
37
+
38
+
39
def _get_required(
    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
) -> T:
    # Like _get, but a missing/None result is a validation error.
    value = _get(d, expected_type, key, default)
    if value is None:
        raise DirectUrlValidationError(f"{key} must have a value")
    return value
46
+
47
+
48
+ def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":
49
+ infos = [info for info in infos if info is not None]
50
+ if not infos:
51
+ raise DirectUrlValidationError(
52
+ "missing one of archive_info, dir_info, vcs_info"
53
+ )
54
+ if len(infos) > 1:
55
+ raise DirectUrlValidationError(
56
+ "more than one of archive_info, dir_info, vcs_info"
57
+ )
58
+ assert infos[0] is not None
59
+ return infos[0]
60
+
61
+
62
+ def _filter_none(**kwargs: Any) -> Dict[str, Any]:
63
+ """Make dict excluding None values."""
64
+ return {k: v for k, v in kwargs.items() if v is not None}
65
+
66
+
67
class VcsInfo:
    """PEP 610 ``vcs_info`` section: a version-control checkout source."""

    # PEP 610 key under which this section is serialized.
    name = "vcs_info"

    def __init__(
        self,
        vcs: str,
        commit_id: str,
        requested_revision: Optional[str] = None,
    ) -> None:
        self.vcs = vcs
        self.requested_revision = requested_revision
        self.commit_id = commit_id

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
        # None input (section absent) propagates as None.
        if d is None:
            return None
        return cls(
            vcs=_get_required(d, str, "vcs"),
            commit_id=_get_required(d, str, "commit_id"),
            requested_revision=_get(d, str, "requested_revision"),
        )

    def _to_dict(self) -> Dict[str, Any]:
        # requested_revision is optional and dropped when None.
        return _filter_none(
            vcs=self.vcs,
            requested_revision=self.requested_revision,
            commit_id=self.commit_id,
        )
96
+
97
+
98
class ArchiveInfo:
    """PEP 610 ``archive_info`` section: an archive (sdist/wheel) source."""

    # PEP 610 key under which this section is serialized.
    name = "archive_info"

    def __init__(
        self,
        hash: Optional[str] = None,
        hashes: Optional[Dict[str, str]] = None,
    ) -> None:
        # set hashes before hash, since the hash setter will further populate hashes
        self.hashes = hashes
        self.hash = hash

    @property
    def hash(self) -> Optional[str]:
        # Legacy single "<name>=<value>" hash string (pre-"hashes" format).
        return self._hash

    @hash.setter
    def hash(self, value: Optional[str]) -> None:
        if value is not None:
            # Auto-populate the hashes key to upgrade to the new format automatically.
            # We don't back-populate the legacy hash key from hashes.
            try:
                hash_name, hash_value = value.split("=", 1)
            except ValueError:
                raise DirectUrlValidationError(
                    f"invalid archive_info.hash format: {value!r}"
                )
            if self.hashes is None:
                self.hashes = {hash_name: hash_value}
            elif hash_name not in self.hashes:
                # Copy before mutating so a caller-supplied dict isn't changed.
                self.hashes = self.hashes.copy()
                self.hashes[hash_name] = hash_value
        self._hash = value

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
        # None input (section absent) propagates as None.
        if d is None:
            return None
        return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes"))

    def _to_dict(self) -> Dict[str, Any]:
        return _filter_none(hash=self.hash, hashes=self.hashes)
140
+
141
+
142
class DirInfo:
    """PEP 610 ``dir_info`` section: a local directory source."""

    # PEP 610 key under which this section is serialized.
    name = "dir_info"

    def __init__(
        self,
        editable: bool = False,
    ) -> None:
        self.editable = editable

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
        # None input (section absent) propagates as None.
        if d is None:
            return None
        return cls(editable=_get_required(d, bool, "editable", default=False))

    def _to_dict(self) -> Dict[str, Any]:
        # Serialize "editable" only when True (False is the implied default).
        return _filter_none(editable=self.editable or None)
159
+
160
+
161
+ InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]
162
+
163
+
164
class DirectUrl:
    """In-memory representation of a PEP 610 direct_url.json document."""

    def __init__(
        self,
        url: str,
        info: InfoType,
        subdirectory: Optional[str] = None,
    ) -> None:
        self.url = url
        # Exactly one of ArchiveInfo / DirInfo / VcsInfo (see from_dict).
        self.info = info
        self.subdirectory = subdirectory

    def _remove_auth_from_netloc(self, netloc: str) -> str:
        """Strip the user:password part from *netloc*, with two exceptions:
        the conventional ``git@`` user for git VCS URLs, and PEP 610
        environment-variable placeholders (ENV_VAR_RE)."""
        if "@" not in netloc:
            return netloc
        user_pass, netloc_no_user_pass = netloc.split("@", 1)
        if (
            isinstance(self.info, VcsInfo)
            and self.info.vcs == "git"
            and user_pass == "git"
        ):
            return netloc
        if ENV_VAR_RE.match(user_pass):
            return netloc
        return netloc_no_user_pass

    @property
    def redacted_url(self) -> str:
        """url with user:password part removed unless it is formed with
        environment variables as specified in PEP 610, or it is ``git``
        in the case of a git URL.
        """
        purl = urllib.parse.urlsplit(self.url)
        netloc = self._remove_auth_from_netloc(purl.netloc)
        surl = urllib.parse.urlunsplit(
            (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
        )
        return surl

    def validate(self) -> None:
        # Round-trip through the dict form; any structural problem raises
        # DirectUrlValidationError.
        self.from_dict(self.to_dict())

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
        """Build a DirectUrl from parsed JSON; raises
        DirectUrlValidationError on missing/ill-typed fields."""
        return DirectUrl(
            url=_get_required(d, str, "url"),
            subdirectory=_get(d, str, "subdirectory"),
            info=_exactly_one_of(
                [
                    ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
                    DirInfo._from_dict(_get(d, dict, "dir_info")),
                    VcsInfo._from_dict(_get(d, dict, "vcs_info")),
                ]
            ),
        )

    def to_dict(self) -> Dict[str, Any]:
        # Note: serialization uses the redacted URL, so credentials never
        # land in direct_url.json.
        res = _filter_none(
            url=self.redacted_url,
            subdirectory=self.subdirectory,
        )
        res[self.info.name] = self.info._to_dict()
        return res

    @classmethod
    def from_json(cls, s: str) -> "DirectUrl":
        return cls.from_dict(json.loads(s))

    def to_json(self) -> str:
        # sort_keys keeps the serialized form deterministic.
        return json.dumps(self.to_dict(), sort_keys=True)

    def is_local_editable(self) -> bool:
        return isinstance(self.info, DirInfo) and self.info.editable
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/format_control.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import FrozenSet, Optional, Set
2
+
3
+ from pip._vendor.packaging.utils import canonicalize_name
4
+
5
+ from pip._internal.exceptions import CommandError
6
+
7
+
8
class FormatControl:
    """Helper for managing formats from which a package can be installed.

    Tracks two mutually-exclusive name sets: packages that must be built
    from source (``no_binary``) and packages that must be installed from
    a wheel (``only_binary``). The special token ``:all:`` applies a rule
    to every package; ``:none:`` clears previously accumulated rules.
    """

    __slots__ = ["no_binary", "only_binary"]

    def __init__(
        self,
        no_binary: Optional[Set[str]] = None,
        only_binary: Optional[Set[str]] = None,
    ) -> None:
        # Avoid mutable default arguments: each instance gets its own sets.
        self.no_binary = set() if no_binary is None else no_binary
        self.only_binary = set() if only_binary is None else only_binary

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, self.__class__):
            return NotImplemented
        if self.__slots__ != other.__slots__:
            return False
        return all(
            getattr(self, attr) == getattr(other, attr) for attr in self.__slots__
        )

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})"

    @staticmethod
    def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
        """Apply a comma-separated ``--no-binary`` / ``--only-binary`` value.

        Names added to *target* are removed from *other* so the two sets
        never both claim the same package.
        """
        if value.startswith("-"):
            # A leading "-" means the option consumed the next flag as its
            # argument; reject it rather than record a bogus package name.
            raise CommandError(
                "--no-binary / --only-binary option requires 1 argument."
            )
        names = value.split(",")
        while ":all:" in names:
            # ":all:" wipes both sets; anything listed before it is moot.
            other.clear()
            target.clear()
            target.add(":all:")
            del names[: names.index(":all:") + 1]
            # Without a later ":none:" there is nothing left to process,
            # since ":all:" already covers everything.
            if ":none:" not in names:
                return
        for name in names:
            if name == ":none:":
                target.clear()
            else:
                canonical = canonicalize_name(name)
                other.discard(canonical)
                target.add(canonical)

    def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
        """Return the subset of {"binary", "source"} allowed for a package.

        Per-package rules take precedence over ":all:" rules.
        """
        allowed = {"binary", "source"}
        if canonical_name in self.only_binary:
            allowed.discard("source")
        elif canonical_name in self.no_binary:
            allowed.discard("binary")
        elif ":all:" in self.only_binary:
            allowed.discard("source")
        elif ":all:" in self.no_binary:
            allowed.discard("binary")
        return frozenset(allowed)

    def disallow_binaries(self) -> None:
        """Force source builds for every package (equivalent to --no-binary :all:)."""
        self.handle_mutual_excludes(
            ":all:",
            self.no_binary,
            self.only_binary,
        )
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/link.py ADDED
@@ -0,0 +1,579 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import itertools
3
+ import logging
4
+ import os
5
+ import posixpath
6
+ import re
7
+ import urllib.parse
8
+ from dataclasses import dataclass
9
+ from typing import (
10
+ TYPE_CHECKING,
11
+ Any,
12
+ Dict,
13
+ List,
14
+ Mapping,
15
+ NamedTuple,
16
+ Optional,
17
+ Tuple,
18
+ Union,
19
+ )
20
+
21
+ from pip._internal.utils.deprecation import deprecated
22
+ from pip._internal.utils.filetypes import WHEEL_EXTENSION
23
+ from pip._internal.utils.hashes import Hashes
24
+ from pip._internal.utils.misc import (
25
+ pairwise,
26
+ redact_auth_from_url,
27
+ split_auth_from_netloc,
28
+ splitext,
29
+ )
30
+ from pip._internal.utils.models import KeyBasedCompareMixin
31
+ from pip._internal.utils.urls import path_to_url, url_to_path
32
+
33
+ if TYPE_CHECKING:
34
+ from pip._internal.index.collector import IndexContent
35
+
36
+ logger = logging.getLogger(__name__)
37
+
38
+
39
+ # Order matters, earlier hashes have a precedence over later hashes for what
40
+ # we will pick to use.
41
+ _SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
42
+
43
+
44
@dataclass(frozen=True)
class LinkHash:
    """Links to content may have embedded hash values. This class parses those.

    `name` must be any member of `_SUPPORTED_HASHES`.

    This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to
    be JSON-serializable to conform to PEP 610, this class contains the logic for
    parsing a hash name and value for correctness, and then checking whether that hash
    conforms to a schema with `.is_hash_allowed()`."""

    # Hash algorithm name, e.g. "sha256".
    name: str
    # Hex digest as found in the URL fragment; NOT validated as hex here.
    value: str

    _hash_url_fragment_re = re.compile(
        # NB: we do not validate that the second group (.*) is a valid hex
        # digest. Instead, we simply keep that string in this class, and then check it
        # against Hashes when hash-checking is needed. This is easier to debug than
        # proactively discarding an invalid hex digest, as we handle incorrect hashes
        # and malformed hashes in the same place.
        r"[#&]({choices})=([^&]*)".format(
            choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
        ),
    )

    def __post_init__(self) -> None:
        # Guard against constructing with an algorithm we cannot check.
        assert self.name in _SUPPORTED_HASHES

    @classmethod
    @functools.lru_cache(maxsize=None)
    def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
        """Search a string for a checksum algorithm name and encoded output value."""
        # lru_cache is safe here: the only argument besides cls is a str.
        match = cls._hash_url_fragment_re.search(url)
        if match is None:
            return None
        name, value = match.groups()
        return cls(name=name, value=value)

    def as_dict(self) -> Dict[str, str]:
        """Return a single-entry {name: value} mapping for this hash."""
        return {self.name: self.value}

    def as_hashes(self) -> Hashes:
        """Return a Hashes instance which checks only for the current hash."""
        return Hashes({self.name: [self.value]})

    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
        """
        Return True if the current hash is allowed by `hashes`.

        A None `hashes` means "no hashes required", which is treated as
        not allowed here (the caller decides how to interpret that).
        """
        if hashes is None:
            return False
        return hashes.is_hash_allowed(self.name, hex_digest=self.value)
96
+
97
+
98
@dataclass(frozen=True)
class MetadataFile:
    """Information about a core metadata file associated with a distribution."""

    # Mapping of hash name -> hex digest for the metadata file, or None when
    # the index advertised the file's existence without supplying hashes.
    hashes: Optional[Dict[str, str]]

    def __post_init__(self) -> None:
        # Callers are expected to pre-filter through supported_hashes();
        # this assert documents and enforces that contract.
        if self.hashes is not None:
            assert all(name in _SUPPORTED_HASHES for name in self.hashes)
107
+
108
+
109
+ def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
110
+ # Remove any unsupported hash types from the mapping. If this leaves no
111
+ # supported hashes, return None
112
+ if hashes is None:
113
+ return None
114
+ hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
115
+ if not hashes:
116
+ return None
117
+ return hashes
118
+
119
+
120
+ def _clean_url_path_part(part: str) -> str:
121
+ """
122
+ Clean a "part" of a URL path (i.e. after splitting on "@" characters).
123
+ """
124
+ # We unquote prior to quoting to make sure nothing is double quoted.
125
+ return urllib.parse.quote(urllib.parse.unquote(part))
126
+
127
+
128
+ def _clean_file_url_path(part: str) -> str:
129
+ """
130
+ Clean the first part of a URL path that corresponds to a local
131
+ filesystem path (i.e. the first part after splitting on "@" characters).
132
+ """
133
+ # We unquote prior to quoting to make sure nothing is double quoted.
134
+ # Also, on Windows the path part might contain a drive letter which
135
+ # should not be quoted. On Linux where drive letters do not
136
+ # exist, the colon should be quoted. We rely on urllib.request
137
+ # to do the right thing here.
138
+ return urllib.request.pathname2url(urllib.request.url2pathname(part))
139
+
140
+
141
+ # percent-encoded: /
142
+ _reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
143
+
144
+
145
+ def _clean_url_path(path: str, is_local_path: bool) -> str:
146
+ """
147
+ Clean the path portion of a URL.
148
+ """
149
+ if is_local_path:
150
+ clean_func = _clean_file_url_path
151
+ else:
152
+ clean_func = _clean_url_path_part
153
+
154
+ # Split on the reserved characters prior to cleaning so that
155
+ # revision strings in VCS URLs are properly preserved.
156
+ parts = _reserved_chars_re.split(path)
157
+
158
+ cleaned_parts = []
159
+ for to_clean, reserved in pairwise(itertools.chain(parts, [""])):
160
+ cleaned_parts.append(clean_func(to_clean))
161
+ # Normalize %xx escapes (e.g. %2f -> %2F)
162
+ cleaned_parts.append(reserved.upper())
163
+
164
+ return "".join(cleaned_parts)
165
+
166
+
167
+ def _ensure_quoted_url(url: str) -> str:
168
+ """
169
+ Make sure a link is fully quoted.
170
+ For example, if ' ' occurs in the URL, it will be replaced with "%20",
171
+ and without double-quoting other characters.
172
+ """
173
+ # Split the URL into parts according to the general structure
174
+ # `scheme://netloc/path;parameters?query#fragment`.
175
+ result = urllib.parse.urlparse(url)
176
+ # If the netloc is empty, then the URL refers to a local filesystem path.
177
+ is_local_path = not result.netloc
178
+ path = _clean_url_path(result.path, is_local_path=is_local_path)
179
+ return urllib.parse.urlunparse(result._replace(path=path))
180
+
181
+
182
class Link(KeyBasedCompareMixin):
    """Represents a parsed link from a Package Index's simple URL"""

    # __slots__ keeps per-instance memory low; Link objects are created in
    # large numbers while parsing index pages.
    __slots__ = [
        "_parsed_url",
        "_url",
        "_hashes",
        "comes_from",
        "requires_python",
        "yanked_reason",
        "metadata_file_data",
        "cache_link_parsing",
        "egg_fragment",
    ]

    def __init__(
        self,
        url: str,
        comes_from: Optional[Union[str, "IndexContent"]] = None,
        requires_python: Optional[str] = None,
        yanked_reason: Optional[str] = None,
        metadata_file_data: Optional[MetadataFile] = None,
        cache_link_parsing: bool = True,
        hashes: Optional[Mapping[str, str]] = None,
    ) -> None:
        """
        :param url: url of the resource pointed to (href of the link)
        :param comes_from: instance of IndexContent where the link was found,
            or string.
        :param requires_python: String containing the `Requires-Python`
            metadata field, specified in PEP 345. This may be specified by
            a data-requires-python attribute in the HTML link tag, as
            described in PEP 503.
        :param yanked_reason: the reason the file has been yanked, if the
            file has been yanked, or None if the file hasn't been yanked.
            This is the value of the "data-yanked" attribute, if present, in
            a simple repository HTML link. If the file has been yanked but
            no reason was provided, this should be the empty string. See
            PEP 592 for more information and the specification.
        :param metadata_file_data: the metadata attached to the file, or None if
            no such metadata is provided. This argument, if not None, indicates
            that a separate metadata file exists, and also optionally supplies
            hashes for that file.
        :param cache_link_parsing: A flag that is used elsewhere to determine
            whether resources retrieved from this link should be cached. PyPI
            URLs should generally have this set to False, for example.
        :param hashes: A mapping of hash names to digests to allow us to
            determine the validity of a download.
        """

        # The comes_from, requires_python, and metadata_file_data arguments are
        # only used by classmethods of this class, and are not used in client
        # code directly.

        # url can be a UNC windows share
        if url.startswith("\\\\"):
            url = path_to_url(url)

        self._parsed_url = urllib.parse.urlsplit(url)
        # Store the url as a private attribute to prevent accidentally
        # trying to set a new value.
        self._url = url

        # Hashes embedded in the URL fragment win over the `hashes` argument:
        # the dict merge below puts hashes_from_link last.
        link_hash = LinkHash.find_hash_url_fragment(url)
        hashes_from_link = {} if link_hash is None else link_hash.as_dict()
        if hashes is None:
            self._hashes = hashes_from_link
        else:
            self._hashes = {**hashes, **hashes_from_link}

        self.comes_from = comes_from
        # Normalize "" to None so truthiness checks and is-None checks agree.
        self.requires_python = requires_python if requires_python else None
        self.yanked_reason = yanked_reason
        self.metadata_file_data = metadata_file_data

        # NOTE(review): KeyBasedCompareMixin presumably derives equality and
        # hashing from `key` (the raw URL string) — confirm in utils.models.
        super().__init__(key=url, defining_class=Link)

        self.cache_link_parsing = cache_link_parsing
        self.egg_fragment = self._egg_fragment()

    @classmethod
    def from_json(
        cls,
        file_data: Dict[str, Any],
        page_url: str,
    ) -> Optional["Link"]:
        """
        Convert an pypi json document from a simple repository page into a Link.

        Returns None when the entry has no "url" key.
        """
        file_url = file_data.get("url")
        if file_url is None:
            return None

        url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
        pyrequire = file_data.get("requires-python")
        yanked_reason = file_data.get("yanked")
        hashes = file_data.get("hashes", {})

        # PEP 714: Indexes must use the name core-metadata, but
        # clients should support the old name as a fallback for compatibility.
        metadata_info = file_data.get("core-metadata")
        if metadata_info is None:
            metadata_info = file_data.get("dist-info-metadata")

        # The metadata info value may be a boolean, or a dict of hashes.
        if isinstance(metadata_info, dict):
            # The file exists, and hashes have been supplied
            metadata_file_data = MetadataFile(supported_hashes(metadata_info))
        elif metadata_info:
            # The file exists, but there are no hashes
            metadata_file_data = MetadataFile(None)
        else:
            # False or not present: the file does not exist
            metadata_file_data = None

        # The Link.yanked_reason expects an empty string instead of a boolean.
        if yanked_reason and not isinstance(yanked_reason, str):
            yanked_reason = ""
        # The Link.yanked_reason expects None instead of False.
        elif not yanked_reason:
            yanked_reason = None

        return cls(
            url,
            comes_from=page_url,
            requires_python=pyrequire,
            yanked_reason=yanked_reason,
            hashes=hashes,
            metadata_file_data=metadata_file_data,
        )

    @classmethod
    def from_element(
        cls,
        anchor_attribs: Dict[str, Optional[str]],
        page_url: str,
        base_url: str,
    ) -> Optional["Link"]:
        """
        Convert an anchor element's attributes in a simple repository page to a Link.

        Returns None when the anchor has no (or an empty) href.
        """
        href = anchor_attribs.get("href")
        if not href:
            return None

        url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
        pyrequire = anchor_attribs.get("data-requires-python")
        yanked_reason = anchor_attribs.get("data-yanked")

        # PEP 714: Indexes must use the name data-core-metadata, but
        # clients should support the old name as a fallback for compatibility.
        metadata_info = anchor_attribs.get("data-core-metadata")
        if metadata_info is None:
            metadata_info = anchor_attribs.get("data-dist-info-metadata")
        # The metadata info value may be the string "true", or a string of
        # the form "hashname=hashval"
        if metadata_info == "true":
            # The file exists, but there are no hashes
            metadata_file_data = MetadataFile(None)
        elif metadata_info is None:
            # The file does not exist
            metadata_file_data = None
        else:
            # The file exists, and hashes have been supplied
            hashname, sep, hashval = metadata_info.partition("=")
            if sep == "=":
                metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
            else:
                # Error - data is wrong. Treat as no hashes supplied.
                logger.debug(
                    "Index returned invalid data-dist-info-metadata value: %s",
                    metadata_info,
                )
                metadata_file_data = MetadataFile(None)

        return cls(
            url,
            comes_from=page_url,
            requires_python=pyrequire,
            yanked_reason=yanked_reason,
            metadata_file_data=metadata_file_data,
        )

    def __str__(self) -> str:
        # Auth credentials are redacted so links are safe to log.
        if self.requires_python:
            rp = f" (requires-python:{self.requires_python})"
        else:
            rp = ""
        if self.comes_from:
            return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
        else:
            return redact_auth_from_url(str(self._url))

    def __repr__(self) -> str:
        return f"<Link {self}>"

    @property
    def url(self) -> str:
        # Read-only view of the URL passed to the constructor.
        return self._url

    @property
    def filename(self) -> str:
        """Last path component of the URL, unquoted; falls back to the
        auth-stripped netloc when the path is empty (e.g. "https://host/")."""
        path = self.path.rstrip("/")
        name = posixpath.basename(path)
        if not name:
            # Make sure we don't leak auth information if the netloc
            # includes a username and password.
            netloc, user_pass = split_auth_from_netloc(self.netloc)
            return netloc

        name = urllib.parse.unquote(name)
        assert name, f"URL {self._url!r} produced no filename"
        return name

    @property
    def file_path(self) -> str:
        # Only meaningful for file: URLs; url_to_path raises otherwise.
        return url_to_path(self.url)

    @property
    def scheme(self) -> str:
        return self._parsed_url.scheme

    @property
    def netloc(self) -> str:
        """
        This can contain auth information.
        """
        return self._parsed_url.netloc

    @property
    def path(self) -> str:
        return urllib.parse.unquote(self._parsed_url.path)

    def splitext(self) -> Tuple[str, str]:
        # (stem, extension) of the last path component.
        return splitext(posixpath.basename(self.path.rstrip("/")))

    @property
    def ext(self) -> str:
        return self.splitext()[1]

    @property
    def url_without_fragment(self) -> str:
        scheme, netloc, path, query, fragment = self._parsed_url
        return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))

    _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")

    # Per PEP 508.
    _project_name_re = re.compile(
        r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
    )

    def _egg_fragment(self) -> Optional[str]:
        # Extract the value of an "egg=" fragment, warning (via the
        # deprecation machinery) when it is not a valid PEP 508 name.
        match = self._egg_fragment_re.search(self._url)
        if not match:
            return None

        # An egg fragment looks like a PEP 508 project name, along with
        # an optional extras specifier. Anything else is invalid.
        project_name = match.group(1)
        if not self._project_name_re.match(project_name):
            deprecated(
                reason=f"{self} contains an egg fragment with a non-PEP 508 name",
                replacement="to use the req @ url syntax, and remove the egg fragment",
                gone_in="25.0",
                issue=11617,
            )

        return project_name

    _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")

    @property
    def subdirectory_fragment(self) -> Optional[str]:
        # Value of a "subdirectory=" URL fragment, or None if absent.
        match = self._subdirectory_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    def metadata_link(self) -> Optional["Link"]:
        """Return a link to the associated core metadata file (if any)."""
        if self.metadata_file_data is None:
            return None
        # PEP 658/714: the metadata file lives next to the distribution
        # file, with ".metadata" appended.
        metadata_url = f"{self.url_without_fragment}.metadata"
        if self.metadata_file_data.hashes is None:
            return Link(metadata_url)
        return Link(metadata_url, hashes=self.metadata_file_data.hashes)

    def as_hashes(self) -> Hashes:
        return Hashes({k: [v] for k, v in self._hashes.items()})

    @property
    def hash(self) -> Optional[str]:
        # First hash digest, if any (insertion order of self._hashes).
        return next(iter(self._hashes.values()), None)

    @property
    def hash_name(self) -> Optional[str]:
        # First hash algorithm name, if any (paired with `hash` above).
        return next(iter(self._hashes), None)

    @property
    def show_url(self) -> str:
        # Basename of the URL with query and fragment stripped.
        return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])

    @property
    def is_file(self) -> bool:
        return self.scheme == "file"

    def is_existing_dir(self) -> bool:
        return self.is_file and os.path.isdir(self.file_path)

    @property
    def is_wheel(self) -> bool:
        return self.ext == WHEEL_EXTENSION

    @property
    def is_vcs(self) -> bool:
        # Imported lazily to avoid a circular import with pip._internal.vcs.
        from pip._internal.vcs import vcs

        return self.scheme in vcs.all_schemes

    @property
    def is_yanked(self) -> bool:
        return self.yanked_reason is not None

    @property
    def has_hash(self) -> bool:
        return bool(self._hashes)

    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
        """
        Return True if the link has a hash and it is allowed by `hashes`.
        """
        if hashes is None:
            return False
        return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())
517
+
518
+
519
class _CleanResult(NamedTuple):
    """Convert link for equivalency check.

    This is used in the resolver to check whether two URL-specified requirements
    likely point to the same distribution and can be considered equivalent. This
    equivalency logic avoids comparing URLs literally, which can be too strict
    (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpected by users.

    Currently this does three things:

    1. Drop the basic auth part. This is technically wrong since a server can
       serve different content based on auth, but if it does that, it is even
       impossible to guarantee two URLs without auth are equivalent, since
       the user can input different auth information when prompted. So the
       practical solution is to assume the auth doesn't affect the response.
    2. Parse the query to avoid the ordering issue. Note that ordering under the
       same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are
       still considered different.
    3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
       hash values, since it should have no impact the downloaded content. Note
       that this drops the "egg=" part historically used to denote the requested
       project (and extras), which is wrong in the strictest sense, but too many
       people are supplying it inconsistently to cause superfluous resolution
       conflicts, so we choose to also ignore them.
    """

    # URL with auth removed and query/fragment blanked out.
    parsed: urllib.parse.SplitResult
    # Parsed query string (order-insensitive across keys).
    query: Dict[str, List[str]]
    # First "subdirectory=" fragment value, or "".
    subdirectory: str
    # First fragment hash per supported algorithm.
    hashes: Dict[str, str]
549
+
550
+
551
def _clean_link(link: Link) -> _CleanResult:
    """Normalize *link* into a _CleanResult for equivalence comparison."""
    parsed = link._parsed_url
    # Drop any auth part: keep only what follows the last "@".
    netloc = parsed.netloc.rsplit("@", 1)[-1]
    # According to RFC 8089, an empty host in file: means localhost.
    if parsed.scheme == "file" and not netloc:
        netloc = "localhost"
    fragment = urllib.parse.parse_qs(parsed.fragment)
    if "egg" in fragment:
        logger.debug("Ignoring egg= fragment in %s", link)
    # If there are multiple subdirectory values, use the first one.
    # This matches the behavior of Link.subdirectory_fragment.
    subdirectories = fragment.get("subdirectory", [])
    subdirectory = subdirectories[0] if subdirectories else ""
    # If there are multiple hash values under the same algorithm, use the
    # first one. This matches the behavior of Link.hash_value.
    hashes = {
        name: fragment[name][0] for name in _SUPPORTED_HASHES if name in fragment
    }
    return _CleanResult(
        parsed=parsed._replace(netloc=netloc, query="", fragment=""),
        query=urllib.parse.parse_qs(parsed.query),
        subdirectory=subdirectory,
        hashes=hashes,
    )
575
+
576
+
577
@functools.lru_cache(maxsize=None)
def links_equivalent(link1: Link, link2: Link) -> bool:
    # Compare normalized forms (auth stripped, query parsed, fragment reduced
    # to subdirectory + hashes) rather than raw URL strings.
    # NOTE(review): lru_cache requires Link to be hashable — presumably
    # provided by KeyBasedCompareMixin; confirm in utils.models.
    return _clean_link(link1) == _clean_link(link2)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/scheme.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ For types associated with installation schemes.
3
+
4
+ For a general overview of available schemes and their context, see
5
+ https://docs.python.org/3/install/index.html#alternate-installation.
6
+ """
7
+
8
+
9
SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]


class Scheme:
    """A Scheme holds paths which are used as the base directories for
    artifacts associated with a Python package.

    Each attribute named in SCHEME_KEYS is a directory path string.
    """

    # Slots mirror SCHEME_KEYS exactly, so attribute names and key names
    # stay in sync by construction.
    __slots__ = SCHEME_KEYS

    def __init__(
        self,
        platlib: str,
        purelib: str,
        headers: str,
        scripts: str,
        data: str,
    ) -> None:
        # Assign each path onto the slot of the same name.
        values = (platlib, purelib, headers, scripts, data)
        for key, value in zip(SCHEME_KEYS, values):
            setattr(self, key, value)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/search_scope.py ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import itertools
2
+ import logging
3
+ import os
4
+ import posixpath
5
+ import urllib.parse
6
+ from typing import List
7
+
8
+ from pip._vendor.packaging.utils import canonicalize_name
9
+
10
+ from pip._internal.models.index import PyPI
11
+ from pip._internal.utils.compat import has_tls
12
+ from pip._internal.utils.misc import normalize_path, redact_auth_from_url
13
+
14
+ logger = logging.getLogger(__name__)
15
+
16
+
17
class SearchScope:

    """
    Encapsulates the locations that pip is configured to search.
    """

    __slots__ = ["find_links", "index_urls", "no_index"]

    @classmethod
    def create(
        cls,
        find_links: List[str],
        index_urls: List[str],
        no_index: bool,
    ) -> "SearchScope":
        """
        Create a SearchScope object after normalizing the `find_links`.

        Also warns (once) when TLS is unavailable but an https location
        is configured.
        """
        # Build find_links. If an argument starts with ~, it may be
        # a local file relative to a home directory. So try normalizing
        # it and if it exists, use the normalized version.
        # This is deliberately conservative - it might be fine just to
        # blindly normalize anything starting with a ~...
        built_find_links: List[str] = []
        for link in find_links:
            if link.startswith("~"):
                new_link = normalize_path(link)
                if os.path.exists(new_link):
                    link = new_link
            built_find_links.append(link)

        # If we don't have TLS enabled, then WARN if anyplace we're looking
        # relies on TLS.
        if not has_tls():
            for link in itertools.chain(index_urls, built_find_links):
                parsed = urllib.parse.urlparse(link)
                if parsed.scheme == "https":
                    logger.warning(
                        "pip is configured with locations that require "
                        "TLS/SSL, however the ssl module in Python is not "
                        "available."
                    )
                    break

        return cls(
            find_links=built_find_links,
            index_urls=index_urls,
            no_index=no_index,
        )

    def __init__(
        self,
        find_links: List[str],
        index_urls: List[str],
        no_index: bool,
    ) -> None:
        # Prefer the create() classmethod in new code: it normalizes
        # find_links and emits the missing-TLS warning.
        self.find_links = find_links
        self.index_urls = index_urls
        self.no_index = no_index

    def get_formatted_locations(self) -> str:
        """Return a human-readable, auth-redacted summary of the configured
        index URLs and find-links locations (for logging)."""
        lines = []
        redacted_index_urls = []
        # The default PyPI index alone is not worth reporting.
        if self.index_urls and self.index_urls != [PyPI.simple_url]:
            for url in self.index_urls:
                redacted_index_url = redact_auth_from_url(url)

                # Parse the URL
                purl = urllib.parse.urlsplit(redacted_index_url)

                # URL is generally invalid if scheme and netloc is missing
                # there are issues with Python and URL parsing, so this test
                # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
                # always parse invalid URLs correctly - it should raise
                # exceptions for malformed URLs
                if not purl.scheme and not purl.netloc:
                    logger.warning(
                        'The index url "%s" seems invalid, please provide a scheme.',
                        redacted_index_url,
                    )

                redacted_index_urls.append(redacted_index_url)

            lines.append(
                "Looking in indexes: {}".format(", ".join(redacted_index_urls))
            )

        if self.find_links:
            lines.append(
                "Looking in links: {}".format(
                    ", ".join(redact_auth_from_url(url) for url in self.find_links)
                )
            )
        return "\n".join(lines)

    def get_index_urls_locations(self, project_name: str) -> List[str]:
        """Returns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        use this url_name to produce all locations
        """

        def mkurl_pypi_url(url: str) -> str:
            # Project pages live at <index>/<canonical-name>/ per PEP 503.
            loc = posixpath.join(
                url, urllib.parse.quote(canonicalize_name(project_name))
            )
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash. Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's
            # behavior.
            if not loc.endswith("/"):
                loc = loc + "/"
            return loc

        return [mkurl_pypi_url(url) for url in self.index_urls]
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/selection_prefs.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional
2
+
3
+ from pip._internal.models.format_control import FormatControl
4
+
5
+
6
class SelectionPreferences:
    """
    Encapsulates the candidate selection preferences for downloading
    and installing files.
    """

    __slots__ = [
        "allow_yanked",
        "allow_all_prereleases",
        "format_control",
        "prefer_binary",
        "ignore_requires_python",
    ]

    # `allow_yanked` deliberately has no default value: every call site must
    # make an explicit, readable decision about whether yanked releases
    # (PEP 592) are acceptable candidates.
    def __init__(
        self,
        allow_yanked: bool,
        allow_all_prereleases: bool = False,
        format_control: Optional[FormatControl] = None,
        prefer_binary: bool = False,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """Create a SelectionPreferences object.

        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param prefer_binary: Whether to prefer an old, but valid, binary
            dist over a new source dist.
        :param ignore_requires_python: Whether to ignore incompatible
            "Requires-Python" values in links. Defaults to False.
        """
        self.allow_yanked = allow_yanked
        self.allow_all_prereleases = allow_all_prereleases
        self.format_control = format_control
        self.prefer_binary = prefer_binary
        # None is treated the same as an explicit False.
        self.ignore_requires_python = (
            False if ignore_requires_python is None else ignore_requires_python
        )
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/target_python.py ADDED
@@ -0,0 +1,122 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ from typing import List, Optional, Set, Tuple
3
+
4
+ from pip._vendor.packaging.tags import Tag
5
+
6
+ from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot
7
+ from pip._internal.utils.misc import normalize_version_info
8
+
9
+
10
class TargetPython:

    """
    Describes the Python interpreter that a package install/download is
    aimed at (which may differ from the running interpreter).
    """

    __slots__ = [
        "_given_py_version_info",
        "abis",
        "implementation",
        "platforms",
        "py_version",
        "py_version_info",
        "_valid_tags",
        "_valid_tags_set",
    ]

    def __init__(
        self,
        platforms: Optional[List[str]] = None,
        py_version_info: Optional[Tuple[int, ...]] = None,
        abis: Optional[List[str]] = None,
        implementation: Optional[str] = None,
    ) -> None:
        """
        :param platforms: A list of strings or None. If None, searches for
            packages that are supported by the current system. Otherwise, will
            find packages that can be built on the platforms passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param py_version_info: An optional tuple of ints representing the
            Python version information to use (e.g. `sys.version_info[:3]`).
            This can have length 1, 2, or 3 when provided.
        :param abis: A list of strings or None. This is passed to
            compatibility_tags.py's get_supported() function as is.
        :param implementation: A string or None. This is passed to
            compatibility_tags.py's get_supported() function as is.
        """
        # Keep the raw argument around: get_supported() and format_given()
        # need to know whether a version was explicitly requested.
        self._given_py_version_info = py_version_info

        normalized = (
            sys.version_info[:3]
            if py_version_info is None
            else normalize_version_info(py_version_info)
        )

        self.platforms = platforms
        self.abis = abis
        self.implementation = implementation
        self.py_version_info = normalized
        self.py_version = ".".join(map(str, normalized[:2]))

        # Lazily-computed caches for get_sorted_tags / get_unsorted_tags.
        self._valid_tags: Optional[List[Tag]] = None
        self._valid_tags_set: Optional[Set[Tag]] = None

    def format_given(self) -> str:
        """
        Format the given, non-None attributes for display.
        """
        if self._given_py_version_info is None:
            display_version = None
        else:
            display_version = ".".join(
                str(part) for part in self._given_py_version_info
            )

        pairs = (
            ("platforms", self.platforms),
            ("version_info", display_version),
            ("abis", self.abis),
            ("implementation", self.implementation),
        )
        rendered = [f"{key}={value!r}" for key, value in pairs if value is not None]
        return " ".join(rendered)

    def get_sorted_tags(self) -> List[Tag]:
        """
        Return the supported PEP 425 tags to check wheel candidates against.

        The tags are returned in order of preference (most preferred first).
        """
        if self._valid_tags is None:
            # versions=None triggers get_supported()'s special default
            # handling, so only convert when a version was explicitly given.
            given = self._given_py_version_info
            version = None if given is None else version_info_to_nodot(given)

            self._valid_tags = get_supported(
                version=version,
                platforms=self.platforms,
                abis=self.abis,
                impl=self.implementation,
            )

        return self._valid_tags

    def get_unsorted_tags(self) -> Set[Tag]:
        """Exactly the same as get_sorted_tags, but returns a set.

        This is important for performance.
        """
        if self._valid_tags_set is None:
            self._valid_tags_set = set(self.get_sorted_tags())

        return self._valid_tags_set
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/wheel.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Represents a wheel file and provides access to the various parts of the
2
+ name that have meaning.
3
+ """
4
+ import re
5
+ from typing import Dict, Iterable, List
6
+
7
+ from pip._vendor.packaging.tags import Tag
8
+
9
+ from pip._internal.exceptions import InvalidWheelFilename
10
+
11
+
12
class Wheel:
    """A wheel file.

    Parses a wheel (or .dist-info) filename into its PEP 425 components
    and answers tag-compatibility queries against it.
    """

    # Matches <name>-<version>[-<build>]-<pyver>-<abi>-<plat>.whl
    # (and the bare <name>-<version>.dist-info directory form).
    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE,
    )

    def __init__(self, filename: str) -> None:
        """
        :param filename: the wheel filename to parse.
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            # Bug fix: the message previously read "(unknown) is not a valid
            # wheel filename." — an f-string with no placeholder — so the
            # offending filename never appeared in the error.
            raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.")
        self.filename = filename
        self.name = wheel_info.group("name").replace("_", "-")
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group("ver").replace("_", "-")
        self.build_tag = wheel_info.group("build")
        self.pyversions = wheel_info.group("pyver").split(".")
        self.abis = wheel_info.group("abi").split(".")
        self.plats = wheel_info.group("plat").split(".")

        # All the tag combinations from this file
        self.file_tags = {
            Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
        }

    def get_formatted_file_tags(self) -> List[str]:
        """Return the wheel's tags as a sorted list of strings."""
        return sorted(str(tag) for tag in self.file_tags)

    def support_index_min(self, tags: List[Tag]) -> int:
        """Return the lowest index that one of the wheel's file_tag combinations
        achieves in the given list of supported tags.

        For example, if there are 8 supported tags and one of the file tags
        is first in the list, then return 0.

        :param tags: the PEP 425 tags to check the wheel against, in order
            with most preferred first.

        :raises ValueError: If none of the wheel's file tags match one of
            the supported tags.
        """
        try:
            return next(i for i, t in enumerate(tags) if t in self.file_tags)
        except StopIteration:
            raise ValueError()

    def find_most_preferred_tag(
        self, tags: List[Tag], tag_to_priority: Dict[Tag, int]
    ) -> int:
        """Return the priority of the most preferred tag that one of the wheel's file
        tag combinations achieves in the given list of supported tags using the given
        tag_to_priority mapping, where lower priorities are more-preferred.

        This is used in place of support_index_min in some cases in order to avoid
        an expensive linear scan of a large list of tags.

        :param tags: the PEP 425 tags to check the wheel against.
        :param tag_to_priority: a mapping from tag to priority of that tag, where
            lower is more preferred.

        :raises ValueError: If none of the wheel's file tags match one of
            the supported tags.
        """
        return min(
            tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority
        )

    def supported(self, tags: Iterable[Tag]) -> bool:
        """Return whether the wheel is compatible with one of the given tags.

        :param tags: the PEP 425 tags to check the wheel against.
        """
        return not self.file_tags.isdisjoint(tags)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/auth.py ADDED
@@ -0,0 +1,561 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Network Authentication Helpers
2
+
3
+ Contains interface (MultiDomainBasicAuth) and associated glue code for
4
+ providing credentials in the context of network requests.
5
+ """
6
+ import logging
7
+ import os
8
+ import shutil
9
+ import subprocess
10
+ import sysconfig
11
+ import typing
12
+ import urllib.parse
13
+ from abc import ABC, abstractmethod
14
+ from functools import lru_cache
15
+ from os.path import commonprefix
16
+ from pathlib import Path
17
+ from typing import Any, Dict, List, NamedTuple, Optional, Tuple
18
+
19
+ from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
20
+ from pip._vendor.requests.models import Request, Response
21
+ from pip._vendor.requests.utils import get_netrc_auth
22
+
23
+ from pip._internal.utils.logging import getLogger
24
+ from pip._internal.utils.misc import (
25
+ ask,
26
+ ask_input,
27
+ ask_password,
28
+ remove_auth_from_url,
29
+ split_auth_netloc_from_url,
30
+ )
31
+ from pip._internal.vcs.versioncontrol import AuthInfo
32
+
33
logger = getLogger(__name__)

# Module-wide kill switch: flipped to True by
# MultiDomainBasicAuth._get_keyring_auth after a keyring failure, so
# subsequent get_keyring_provider() calls resolve to the null provider.
KEYRING_DISABLED = False
36
+
37
+
38
# Credentials the user supplied for an index (keyed by url), held until the
# authenticated request succeeds and they can be saved to keyring.
Credentials = NamedTuple(
    "Credentials",
    [("url", str), ("username", str), ("password", str)],
)
42
+
43
+
44
class KeyRingBaseProvider(ABC):
    """Keyring base provider interface"""

    # True when this provider is backed by a real keyring implementation;
    # only the null provider sets it to False.
    has_keyring: bool

    @abstractmethod
    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        """Return a (username, password) pair for *url*, or None if none is stored."""
        ...

    @abstractmethod
    def save_auth_info(self, url: str, username: str, password: str) -> None:
        """Persist the given credentials for *url*."""
        ...
56
+
57
+
58
class KeyRingNullProvider(KeyRingBaseProvider):
    """No-op provider used when keyring support is unavailable or disabled."""

    has_keyring = False

    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        # Nothing is ever stored, so nothing can be found.
        return None

    def save_auth_info(self, url: str, username: str, password: str) -> None:
        # Silently discard: there is nowhere to save to.
        return None
68
+
69
+
70
class KeyRingPythonProvider(KeyRingBaseProvider):
    """Keyring interface which uses locally imported `keyring`"""

    has_keyring = True

    def __init__(self) -> None:
        import keyring

        self.keyring = keyring

    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        # keyring>=15.2.0 exposes get_credential, which can look up
        # credentials even without a username; prefer it when present.
        get_credential = getattr(self.keyring, "get_credential", None)
        if get_credential is not None:
            logger.debug("Getting credentials from keyring for %s", url)
            cred = get_credential(url, username)
            if cred is None:
                return None
            return cred.username, cred.password

        if username is None:
            return None
        logger.debug("Getting password from keyring for %s", url)
        password = self.keyring.get_password(url, username)
        return (username, password) if password else None

    def save_auth_info(self, url: str, username: str, password: str) -> None:
        self.keyring.set_password(url, username, password)
100
+
101
+
102
class KeyRingCliProvider(KeyRingBaseProvider):
    """Provider which uses `keyring` cli

    Instead of calling the keyring package installed alongside pip
    we call keyring on the command line which will enable pip to
    use which ever installation of keyring is available first in
    PATH.
    """

    has_keyring = True

    def __init__(self, cmd: str) -> None:
        self.keyring = cmd

    def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
        # This is the default implementation of keyring.get_credential
        # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139
        if username is None:
            return None
        password = self._get_password(url, username)
        return None if password is None else (username, password)

    def save_auth_info(self, url: str, username: str, password: str) -> None:
        return self._set_password(url, username, password)

    def _get_password(self, service_name: str, username: str) -> Optional[str]:
        """Mirror the implementation of keyring.get_password using cli"""
        if self.keyring is None:
            return None

        env = dict(os.environ, PYTHONIOENCODING="utf-8")
        res = subprocess.run(
            [self.keyring, "get", service_name, username],
            stdin=subprocess.DEVNULL,
            stdout=subprocess.PIPE,
            env=env,
        )
        if res.returncode:
            return None
        return res.stdout.decode("utf-8").strip(os.linesep)

    def _set_password(self, service_name: str, username: str, password: str) -> None:
        """Mirror the implementation of keyring.set_password using cli"""
        if self.keyring is None:
            return None
        env = dict(os.environ, PYTHONIOENCODING="utf-8")
        subprocess.run(
            [self.keyring, "set", service_name, username],
            input=f"{password}{os.linesep}".encode("utf-8"),
            env=env,
            check=True,
        )
        return None
159
+
160
+
161
@lru_cache(maxsize=None)
def get_keyring_provider(provider: str) -> KeyRingBaseProvider:
    """Resolve a provider name to a concrete KeyRingBaseProvider.

    ``provider`` is one of "auto", "import", "subprocess" or "disabled";
    "auto" tries the in-process keyring module first, then a ``keyring``
    executable on PATH, and finally falls back to the null provider.

    The result is cached via lru_cache. After any keyring failure,
    MultiDomainBasicAuth._get_keyring_auth sets KEYRING_DISABLED and
    clears this cache, so later calls return the null provider.
    """
    logger.verbose("Keyring provider requested: %s", provider)

    # keyring has previously failed and been disabled
    if KEYRING_DISABLED:
        provider = "disabled"
    if provider in ["import", "auto"]:
        try:
            impl = KeyRingPythonProvider()
            logger.verbose("Keyring provider set: import")
            return impl
        except ImportError:
            pass
        except Exception as exc:
            # In the event of an unexpected exception
            # we should warn the user
            msg = "Installed copy of keyring fails with exception %s"
            if provider == "auto":
                msg = msg + ", trying to find a keyring executable as a fallback"
            logger.warning(msg, exc, exc_info=logger.isEnabledFor(logging.DEBUG))
    if provider in ["subprocess", "auto"]:
        cli = shutil.which("keyring")
        # If the executable we found lives in this environment's scripts
        # directory, re-search PATH with that directory excluded so a
        # keyring installed outside this environment is preferred.
        if cli and cli.startswith(sysconfig.get_path("scripts")):
            # all code within this function is stolen from shutil.which implementation
            @typing.no_type_check
            def PATH_as_shutil_which_determines_it() -> str:
                path = os.environ.get("PATH", None)
                if path is None:
                    try:
                        path = os.confstr("CS_PATH")
                    except (AttributeError, ValueError):
                        # os.confstr() or CS_PATH is not available
                        path = os.defpath
                # bpo-35755: Don't use os.defpath if the PATH environment variable is
                # set to an empty string

                return path

            scripts = Path(sysconfig.get_path("scripts"))

            paths = []
            for path in PATH_as_shutil_which_determines_it().split(os.pathsep):
                p = Path(path)
                try:
                    if not p.samefile(scripts):
                        paths.append(path)
                except FileNotFoundError:
                    pass

            path = os.pathsep.join(paths)

            cli = shutil.which("keyring", path=path)

        if cli:
            logger.verbose("Keyring provider set: subprocess with executable %s", cli)
            return KeyRingCliProvider(cli)

    logger.verbose("Keyring provider set: disabled")
    return KeyRingNullProvider()
221
+
222
+
223
class MultiDomainBasicAuth(AuthBase):
    """requests auth handler that manages HTTP basic-auth credentials
    for multiple index domains: it finds credentials (URL, index URL,
    netrc, keyring), caches them per netloc, prompts on 401 when
    allowed, and optionally saves entered credentials to keyring.
    """

    def __init__(
        self,
        prompting: bool = True,
        index_urls: Optional[List[str]] = None,
        keyring_provider: str = "auto",
    ) -> None:
        # prompting: whether we may interactively ask for credentials.
        self.prompting = prompting
        self.index_urls = index_urls
        self.keyring_provider = keyring_provider  # type: ignore[assignment]
        # Cache of (username, password) keyed by netloc.
        self.passwords: Dict[str, AuthInfo] = {}
        # When the user is prompted to enter credentials and keyring is
        # available, we will offer to save them. If the user accepts,
        # this value is set to the credentials they entered. After the
        # request authenticates, the caller should call
        # ``save_credentials`` to save these.
        self._credentials_to_save: Optional[Credentials] = None

    @property
    def keyring_provider(self) -> KeyRingBaseProvider:
        # Resolved lazily so a failure/disable after construction is honored.
        return get_keyring_provider(self._keyring_provider)

    @keyring_provider.setter
    def keyring_provider(self, provider: str) -> None:
        # The free function get_keyring_provider has been decorated with
        # functools.cache. If an exception occurs in get_keyring_auth that
        # cache will be cleared and keyring disabled, take that into account
        # if you want to remove this indirection.
        self._keyring_provider = provider

    @property
    def use_keyring(self) -> bool:
        # We won't use keyring when --no-input is passed unless
        # a specific provider is requested because it might require
        # user interaction
        return self.prompting or self._keyring_provider not in ["auto", "disabled"]

    def _get_keyring_auth(
        self,
        url: Optional[str],
        username: Optional[str],
    ) -> Optional[AuthInfo]:
        """Return the tuple auth for a given url from keyring.

        On any keyring exception, disables keyring module-wide
        (KEYRING_DISABLED) and clears the provider cache.
        """
        # Do nothing if no url was provided
        if not url:
            return None

        try:
            return self.keyring_provider.get_auth_info(url, username)
        except Exception as exc:
            logger.warning(
                "Keyring is skipped due to an exception: %s",
                str(exc),
            )
            global KEYRING_DISABLED
            KEYRING_DISABLED = True
            get_keyring_provider.cache_clear()
            return None

    def _get_index_url(self, url: str) -> Optional[str]:
        """Return the original index URL matching the requested URL.

        Cached or dynamically generated credentials may work against
        the original index URL rather than just the netloc.

        The provided url should have had its username and password
        removed already. If the original index url had credentials then
        they will be included in the return value.

        Returns None if no matching index was found, or if --no-index
        was specified by the user.
        """
        if not url or not self.index_urls:
            return None

        url = remove_auth_from_url(url).rstrip("/") + "/"
        parsed_url = urllib.parse.urlsplit(url)

        candidates = []

        for index in self.index_urls:
            index = index.rstrip("/") + "/"
            parsed_index = urllib.parse.urlsplit(remove_auth_from_url(index))
            # An exact match (ignoring credentials) wins immediately.
            if parsed_url == parsed_index:
                return index

            if parsed_url.netloc != parsed_index.netloc:
                continue

            candidate = urllib.parse.urlsplit(index)
            candidates.append(candidate)

        if not candidates:
            return None

        # Among same-netloc candidates, prefer the one sharing the longest
        # path prefix (up to the last "/") with the requested URL.
        candidates.sort(
            reverse=True,
            key=lambda candidate: commonprefix(
                [
                    parsed_url.path,
                    candidate.path,
                ]
            ).rfind("/"),
        )

        return urllib.parse.urlunsplit(candidates[0])

    def _get_new_credentials(
        self,
        original_url: str,
        *,
        allow_netrc: bool = True,
        allow_keyring: bool = False,
    ) -> AuthInfo:
        """Find and return credentials for the specified URL.

        Sources, in order: credentials embedded in the URL, credentials
        embedded in a matching index URL, netrc (if allowed), keyring
        (if allowed). Returns (username, password); either may be None.
        """
        # Split the credentials and netloc from the url.
        url, netloc, url_user_password = split_auth_netloc_from_url(
            original_url,
        )

        # Start with the credentials embedded in the url
        username, password = url_user_password
        if username is not None and password is not None:
            logger.debug("Found credentials in url for %s", netloc)
            return url_user_password

        # Find a matching index url for this request
        index_url = self._get_index_url(url)
        if index_url:
            # Split the credentials from the url.
            index_info = split_auth_netloc_from_url(index_url)
            if index_info:
                index_url, _, index_url_user_password = index_info
                logger.debug("Found index url %s", index_url)

        # If an index URL was found, try its embedded credentials
        if index_url and index_url_user_password[0] is not None:
            username, password = index_url_user_password
            if username is not None and password is not None:
                logger.debug("Found credentials in index url for %s", netloc)
                return index_url_user_password

        # Get creds from netrc if we still don't have them
        if allow_netrc:
            netrc_auth = get_netrc_auth(original_url)
            if netrc_auth:
                logger.debug("Found credentials in netrc for %s", netloc)
                return netrc_auth

        # If we don't have a password and keyring is available, use it.
        if allow_keyring:
            # The index url is more specific than the netloc, so try it first
            # fmt: off
            kr_auth = (
                self._get_keyring_auth(index_url, username) or
                self._get_keyring_auth(netloc, username)
            )
            # fmt: on
            if kr_auth:
                logger.debug("Found credentials in keyring for %s", netloc)
                return kr_auth

        return username, password

    def _get_url_and_credentials(
        self, original_url: str
    ) -> Tuple[str, Optional[str], Optional[str]]:
        """Return the credentials to use for the provided URL.

        If allowed, netrc and keyring may be used to obtain the
        correct credentials.

        Returns (url_without_credentials, username, password). Note
        that even if the original URL contains credentials, this
        function may return a different username and password.
        """
        url, netloc, _ = split_auth_netloc_from_url(original_url)

        # Try to get credentials from original url
        username, password = self._get_new_credentials(original_url)

        # If credentials not found, use any stored credentials for this netloc.
        # Do this if either the username or the password is missing.
        # This accounts for the situation in which the user has specified
        # the username in the index url, but the password comes from keyring.
        if (username is None or password is None) and netloc in self.passwords:
            un, pw = self.passwords[netloc]
            # It is possible that the cached credentials are for a different username,
            # in which case the cache should be ignored.
            if username is None or username == un:
                username, password = un, pw

        if username is not None or password is not None:
            # Convert the username and password if they're None, so that
            # this netloc will show up as "cached" in the conditional above.
            # Further, HTTPBasicAuth doesn't accept None, so it makes sense to
            # cache the value that is going to be used.
            username = username or ""
            password = password or ""

            # Store any acquired credentials.
            self.passwords[netloc] = (username, password)

        assert (
            # Credentials were found
            (username is not None and password is not None)
            # Credentials were not found
            or (username is None and password is None)
        ), f"Could not load credentials from url: {original_url}"

        return url, username, password

    def __call__(self, req: Request) -> Request:
        """Attach basic auth (when known) and the 401-retry hook to *req*."""
        # Get credentials for this request
        url, username, password = self._get_url_and_credentials(req.url)

        # Set the url of the request to the url without any credentials
        req.url = url

        if username is not None and password is not None:
            # Send the basic auth with this request
            req = HTTPBasicAuth(username, password)(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    # Factored out to allow for easy patching in tests
    def _prompt_for_password(
        self, netloc: str
    ) -> Tuple[Optional[str], Optional[str], bool]:
        """Ask the user for credentials for *netloc*.

        Returns (username, password, should_offer_to_save); keyring is
        consulted first, in which case nothing new needs saving.
        """
        username = ask_input(f"User for {netloc}: ") if self.prompting else None
        if not username:
            return None, None, False
        if self.use_keyring:
            auth = self._get_keyring_auth(netloc, username)
            if auth and auth[0] is not None and auth[1] is not None:
                return auth[0], auth[1], False
        password = ask_password("Password: ")
        return username, password, True

    # Factored out to allow for easy patching in tests
    def _should_save_password_to_keyring(self) -> bool:
        """Return True if the user agrees to persist the entered password."""
        if (
            not self.prompting
            or not self.use_keyring
            or not self.keyring_provider.has_keyring
        ):
            return False
        return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"

    def handle_401(self, resp: Response, **kwargs: Any) -> Response:
        """Response hook: on 401, obtain credentials and retry the request."""
        # We only care about 401 responses, anything else we want to just
        # pass through the actual response
        if resp.status_code != 401:
            return resp

        username, password = None, None

        # Query the keyring for credentials:
        if self.use_keyring:
            username, password = self._get_new_credentials(
                resp.url,
                allow_netrc=False,
                allow_keyring=True,
            )

        # We are not able to prompt the user so simply return the response
        if not self.prompting and not username and not password:
            return resp

        parsed = urllib.parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        save = False
        if not username and not password:
            username, password, save = self._prompt_for_password(parsed.netloc)

        # Store the new username and password to use for future requests
        self._credentials_to_save = None
        if username is not None and password is not None:
            self.passwords[parsed.netloc] = (username, password)

            # Prompt to save the password to keyring
            if save and self._should_save_password_to_keyring():
                self._credentials_to_save = Credentials(
                    url=parsed.netloc,
                    username=username,
                    password=password,
                )

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        # The result of the assignment isn't used, it's just needed to consume
        # the content.
        _ = resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)
        req.register_hook("response", self.warn_on_401)

        # On successful request, save the credentials that were used to
        # keyring. (Note that if the user responded "no" above, this member
        # is not set and nothing will be saved.)
        if self._credentials_to_save:
            req.register_hook("response", self.save_credentials)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp

    def warn_on_401(self, resp: Response, **kwargs: Any) -> None:
        """Response callback to warn about incorrect credentials."""
        if resp.status_code == 401:
            logger.warning(
                "401 Error, Credentials not correct for %s",
                resp.request.url,
            )

    def save_credentials(self, resp: Response, **kwargs: Any) -> None:
        """Response callback to save credentials on success."""
        assert (
            self.keyring_provider.has_keyring
        ), "should never reach here without keyring"

        creds = self._credentials_to_save
        self._credentials_to_save = None
        if creds and resp.status_code < 400:
            try:
                logger.info("Saving credentials to keyring")
                self.keyring_provider.save_auth_info(
                    creds.url, creds.username, creds.password
                )
            except Exception:
                logger.exception("Failed to save credentials")
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/network/cache.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """HTTP cache implementation.
2
+ """
3
+
4
+ import os
5
+ from contextlib import contextmanager
6
+ from datetime import datetime
7
+ from typing import BinaryIO, Generator, Optional, Union
8
+
9
+ from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache
10
+ from pip._vendor.cachecontrol.caches import SeparateBodyFileCache
11
+ from pip._vendor.requests.models import Response
12
+
13
+ from pip._internal.utils.filesystem import adjacent_tmp_file, replace
14
+ from pip._internal.utils.misc import ensure_dir
15
+
16
+
17
def is_from_cache(response: Response) -> bool:
    """Return the response's ``from_cache`` marker, or False when absent."""
    # Responses satisfied locally carry a ``from_cache`` attribute;
    # ordinary network responses do not.
    return getattr(response, "from_cache", False)
19
+
20
+
21
@contextmanager
def suppressed_cache_errors() -> Generator[None, None, None]:
    """Swallow OSError inside the block.

    When the cache directory cannot be accessed, we simply skip caching
    and handle requests as if caching were not enabled.
    """
    try:
        yield
    except OSError:
        return
30
+
31
+
32
class SafeFileCache(SeparateBodyBaseCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.

    There is a race condition when two processes try to write and/or read the
    same entry at the same time, since each entry consists of two separate
    files (https://github.com/psf/cachecontrol/issues/324). We therefore have
    additional logic that makes sure that both files to be present before
    returning an entry; this fixes the read side of the race condition.

    For the write side, we assume that the server will only ever return the
    same data for the same URL, which ought to be the case for files pip is
    downloading. PyPI does not have a mechanism to swap out a wheel for
    another wheel, for example. If this assumption is not true, the
    CacheControl issue will need to be fixed.
    """

    def __init__(self, directory: str) -> None:
        assert directory is not None, "Cache directory must not be None."
        super().__init__()
        self.directory = directory

    def _get_cache_path(self, name: str) -> str:
        # Mirrors cachecontrol.caches.file_cache.FileCache._fn, kept local
        # for backwards compatibility and to avoid a non-public method.
        hashed = SeparateBodyFileCache.encode(name)
        return os.path.join(self.directory, *hashed[:5], hashed)

    def get(self, key: str) -> Optional[bytes]:
        # An entry is only valid when both the metadata and body files exist.
        metadata_path = self._get_cache_path(key)
        body_path = metadata_path + ".body"
        if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
            return None
        with suppressed_cache_errors():
            with open(metadata_path, "rb") as f:
                return f.read()

    def _write(self, path: str, data: bytes) -> None:
        with suppressed_cache_errors():
            ensure_dir(os.path.dirname(path))

            # Write to an adjacent temporary file, then atomically replace.
            with adjacent_tmp_file(path) as f:
                f.write(data)

            replace(f.name, path)

    def set(
        self, key: str, value: bytes, expires: Union[int, datetime, None] = None
    ) -> None:
        self._write(self._get_cache_path(key), value)

    def delete(self, key: str) -> None:
        base = self._get_cache_path(key)
        for target in (base, base + ".body"):
            with suppressed_cache_errors():
                os.remove(target)

    def get_body(self, key: str) -> Optional[BinaryIO]:
        # An entry is only valid when both the metadata and body files exist.
        metadata_path = self._get_cache_path(key)
        body_path = metadata_path + ".body"
        if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
            return None
        with suppressed_cache_errors():
            return open(body_path, "rb")

    def set_body(self, key: str, body: bytes) -> None:
        self._write(self._get_cache_path(key) + ".body", body)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/__init__.py ADDED
File without changes
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/check.py ADDED
@@ -0,0 +1,187 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Validation of dependencies of packages
2
+ """
3
+
4
+ import logging
5
+ from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple
6
+
7
+ from pip._vendor.packaging.requirements import Requirement
8
+ from pip._vendor.packaging.specifiers import LegacySpecifier
9
+ from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
10
+ from pip._vendor.packaging.version import LegacyVersion
11
+
12
+ from pip._internal.distributions import make_distribution_for_install_requirement
13
+ from pip._internal.metadata import get_default_environment
14
+ from pip._internal.metadata.base import DistributionVersion
15
+ from pip._internal.req.req_install import InstallRequirement
16
+ from pip._internal.utils.deprecation import deprecated
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
class PackageDetails(NamedTuple):
    """Version and declared dependencies of one installed distribution."""

    version: DistributionVersion
    dependencies: List[Requirement]
24
+
25
+
26
# Shorthands
# Mapping of canonical package name -> its version and dependency list.
PackageSet = Dict[NormalizedName, PackageDetails]
# (missing dependency name, the requirement that wants it)
Missing = Tuple[NormalizedName, Requirement]
# (dependency name, installed version, the requirement it violates)
Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement]

MissingDict = Dict[NormalizedName, List[Missing]]
ConflictingDict = Dict[NormalizedName, List[Conflicting]]
CheckResult = Tuple[MissingDict, ConflictingDict]
ConflictDetails = Tuple[PackageSet, CheckResult]
35
+
36
+
37
def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
    """Build a PackageSet from every installed distribution.

    Returns the package set together with a flag saying whether any
    distribution's metadata could not be parsed.
    """
    package_set: PackageSet = {}
    problems = False
    env = get_default_environment()
    for dist in env.iter_installed_distributions(local_only=False, skip=()):
        name = dist.canonical_name
        try:
            deps = list(dist.iter_dependencies())
        except (OSError, ValueError) as exc:
            # Don't crash on unreadable or broken metadata.
            logger.warning("Error parsing requirements for %s: %s", name, exc)
            problems = True
        else:
            package_set[name] = PackageDetails(dist.version, deps)
    return package_set, problems
52
+
53
+
54
def check_package_set(
    package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
) -> CheckResult:
    """Check if a package set is consistent

    If should_ignore is passed, it should be a callable that takes a
    package name and returns a boolean.

    Returns a (missing, conflicting) pair of dicts keyed by the package
    whose dependencies are unsatisfied.
    """

    warn_legacy_versions_and_specifiers(package_set)

    missing = {}
    conflicting = {}

    for package_name, package_detail in package_set.items():
        # Info about dependencies of package_name
        missing_deps: Set[Missing] = set()
        conflicting_deps: Set[Conflicting] = set()

        if should_ignore and should_ignore(package_name):
            continue

        for req in package_detail.dependencies:
            name = canonicalize_name(req.name)

            # Check if it's missing
            if name not in package_set:
                missed = True
                if req.marker is not None:
                    # Only count the dependency as missing when its
                    # environment marker applies (extras treated as empty).
                    missed = req.marker.evaluate({"extra": ""})
                if missed:
                    missing_deps.add((name, req))
                continue

            # Check if there's a conflict
            version = package_set[name].version
            if not req.specifier.contains(version, prereleases=True):
                conflicting_deps.add((name, version, req))

        # Sort for deterministic, readable output.
        if missing_deps:
            missing[package_name] = sorted(missing_deps, key=str)
        if conflicting_deps:
            conflicting[package_name] = sorted(conflicting_deps, key=str)

    return missing, conflicting
99
+
100
+
101
def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
    """Report how consistent the dependency graph would be after installing
    the given requirements.
    """
    # Start from what is installed right now ...
    package_set, _ = create_package_set_from_installed()
    # ... then overlay the candidate installs on top of it.
    would_be_installed = _simulate_installation_of(to_install, package_set)

    # Only warn about directly-dependent packages; create a whitelist of them
    whitelist = _create_whitelist(would_be_installed, package_set)
    result = check_package_set(
        package_set, should_ignore=lambda name: name not in whitelist
    )
    return package_set, result
119
+
120
+
121
def _simulate_installation_of(
    to_install: List[InstallRequirement], package_set: PackageSet
) -> Set[NormalizedName]:
    """Mutate *package_set* as if *to_install* were installed; return the names."""
    installed: Set[NormalizedName] = set()

    # Assume every install succeeds and record the resulting metadata.
    for inst_req in to_install:
        abstract_dist = make_distribution_for_install_requirement(inst_req)
        dist = abstract_dist.get_metadata_distribution()
        name = dist.canonical_name
        package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies()))
        installed.add(name)

    return installed
138
+
139
+
140
def _create_whitelist(
    would_be_installed: Set[NormalizedName], package_set: PackageSet
) -> Set[NormalizedName]:
    """Return the installed names plus every package depending on them directly."""
    affected = set(would_be_installed)

    for name, details in package_set.items():
        if name in affected:
            continue
        # One direct dependency on an affected package is enough to include it.
        if any(
            canonicalize_name(req.name) in affected for req in details.dependencies
        ):
            affected.add(name)

    return affected
155
+
156
+
157
def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None:
    """Emit deprecation warnings for non-PEP-440 versions and specifiers.

    Legacy (pre-PEP 440) version numbers and legacy dependency specifiers
    are scheduled for removal; warn so users can upgrade or contact the
    offending project before support disappears.
    """
    for project_name, package_details in package_set.items():
        if isinstance(package_details.version, LegacyVersion):
            deprecated(
                reason=(
                    f"{project_name} {package_details.version} "
                    f"has a non-standard version number."
                ),
                replacement=(
                    f"to upgrade to a newer version of {project_name} "
                    f"or contact the author to suggest that they "
                    f"release a version with a conforming version number"
                ),
                issue=12063,
                gone_in="24.1",
            )
        for dep in package_details.dependencies:
            if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
                deprecated(
                    reason=(
                        f"{project_name} {package_details.version} "
                        f"has a non-standard dependency specifier {dep}."
                    ),
                    # Fixed message grammar: "a conforming dependency
                    # specifier" (was "...specifiers").
                    replacement=(
                        f"to upgrade to a newer version of {project_name} "
                        f"or contact the author to suggest that they "
                        f"release a version with a conforming dependency specifier"
                    ),
                    issue=12063,
                    gone_in="24.1",
                )
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/freeze.py ADDED
@@ -0,0 +1,255 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import collections
2
+ import logging
3
+ import os
4
+ from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set
5
+
6
+ from pip._vendor.packaging.utils import canonicalize_name
7
+ from pip._vendor.packaging.version import Version
8
+
9
+ from pip._internal.exceptions import BadCommand, InstallationError
10
+ from pip._internal.metadata import BaseDistribution, get_environment
11
+ from pip._internal.req.constructors import (
12
+ install_req_from_editable,
13
+ install_req_from_line,
14
+ )
15
+ from pip._internal.req.req_file import COMMENT_RE
16
+ from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
class _EditableInfo(NamedTuple):
    """Requirement string plus accompanying comment lines for an editable dist."""

    requirement: str
    comments: List[str]
24
+
25
+
26
def freeze(
    requirement: Optional[List[str]] = None,
    local_only: bool = False,
    user_only: bool = False,
    paths: Optional[List[str]] = None,
    isolated: bool = False,
    exclude_editable: bool = False,
    skip: Container[str] = (),
) -> Generator[str, None, None]:
    """Yield requirements-file lines describing the installed environment.

    :param requirement: optional list of requirements-file paths; when given,
        lines from those files are echoed/annotated before the flat listing.
    :param skip: canonical names to omit from the trailing
        "added by pip freeze" section.
    """
    installations: Dict[str, FrozenRequirement] = {}

    dists = get_environment(paths).iter_installed_distributions(
        local_only=local_only,
        skip=(),
        user_only=user_only,
    )
    for dist in dists:
        req = FrozenRequirement.from_dist(dist)
        if exclude_editable and req.editable:
            continue
        installations[req.canonical_name] = req

    if requirement:
        # the options that don't get turned into an InstallRequirement
        # should only be emitted once, even if the same option is in multiple
        # requirements files, so we need to keep track of what has been emitted
        # so that we don't emit it again if it's seen again
        emitted_options: Set[str] = set()
        # keep track of which files a requirement is in so that we can
        # give an accurate warning if a requirement appears multiple times.
        req_files: Dict[str, List[str]] = collections.defaultdict(list)
        for req_file_path in requirement:
            with open(req_file_path) as req_file:
                for line in req_file:
                    # Blank lines, comments, and global options are passed
                    # through unchanged (each option only once).
                    if (
                        not line.strip()
                        or line.strip().startswith("#")
                        or line.startswith(
                            (
                                "-r",
                                "--requirement",
                                "-f",
                                "--find-links",
                                "-i",
                                "--index-url",
                                "--pre",
                                "--trusted-host",
                                "--process-dependency-links",
                                "--extra-index-url",
                                "--use-feature",
                            )
                        )
                    ):
                        line = line.rstrip()
                        if line not in emitted_options:
                            emitted_options.add(line)
                            yield line
                        continue

                    # Parse the line into an InstallRequirement so we can
                    # match it against what is actually installed.
                    if line.startswith("-e") or line.startswith("--editable"):
                        if line.startswith("-e"):
                            line = line[2:].strip()
                        else:
                            line = line[len("--editable") :].strip().lstrip("=")
                        line_req = install_req_from_editable(
                            line,
                            isolated=isolated,
                        )
                    else:
                        line_req = install_req_from_line(
                            COMMENT_RE.sub("", line).strip(),
                            isolated=isolated,
                        )

                    if not line_req.name:
                        logger.info(
                            "Skipping line in requirement file [%s] because "
                            "it's not clear what it would install: %s",
                            req_file_path,
                            line.strip(),
                        )
                        logger.info(
                            "  (add #egg=PackageName to the URL to avoid"
                            " this warning)"
                        )
                    else:
                        line_req_canonical_name = canonicalize_name(line_req.name)
                        if line_req_canonical_name not in installations:
                            # either it's not installed, or it is installed
                            # but has been processed already
                            if not req_files[line_req.name]:
                                logger.warning(
                                    "Requirement file [%s] contains %s, but "
                                    "package %r is not installed",
                                    req_file_path,
                                    COMMENT_RE.sub("", line).strip(),
                                    line_req.name,
                                )
                            else:
                                req_files[line_req.name].append(req_file_path)
                        else:
                            yield str(installations[line_req_canonical_name]).rstrip()
                            del installations[line_req_canonical_name]
                            req_files[line_req.name].append(req_file_path)

        # Warn about requirements that were included multiple times (in a
        # single requirements file or in different requirements files).
        for name, files in req_files.items():
            if len(files) > 1:
                logger.warning(
                    "Requirement %s included multiple times [%s]",
                    name,
                    ", ".join(sorted(set(files))),
                )

        yield ("## The following requirements were added by pip freeze:")
    # Anything still in `installations` was not claimed by a requirements
    # file line above and is emitted in name order.
    for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
        if installation.canonical_name not in skip:
            yield str(installation).rstrip()
145
+
146
+
147
def _format_as_name_version(dist: BaseDistribution) -> str:
    """Render *dist* as ``name==version`` (``===`` for non-PEP-440 versions)."""
    version = dist.version
    operator = "==" if isinstance(version, Version) else "==="
    return f"{dist.raw_name}{operator}{version}"
152
+
153
+
154
def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
    """
    Compute and return values (req, comments) for use in
    FrozenRequirement.from_dist().
    """
    editable_project_location = dist.editable_project_location
    assert editable_project_location
    location = os.path.normcase(os.path.abspath(editable_project_location))

    # NOTE(review): local import — presumably to avoid an import cycle with
    # pip._internal.vcs; confirm before moving to module level.
    from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs

    vcs_backend = vcs.get_backend_for_dir(location)

    if vcs_backend is None:
        display = _format_as_name_version(dist)
        logger.debug(
            'No VCS found for editable requirement "%s" in: %r',
            display,
            location,
        )
        return _EditableInfo(
            requirement=location,
            comments=[f"# Editable install with no version control ({display})"],
        )

    vcs_name = type(vcs_backend).__name__

    try:
        req = vcs_backend.get_src_requirement(location, dist.raw_name)
    except RemoteNotFoundError:
        display = _format_as_name_version(dist)
        return _EditableInfo(
            requirement=location,
            comments=[f"# Editable {vcs_name} install with no remote ({display})"],
        )
    except RemoteNotValidError as ex:
        display = _format_as_name_version(dist)
        return _EditableInfo(
            requirement=location,
            comments=[
                f"# Editable {vcs_name} install ({display}) with either a deleted "
                f"local remote or invalid URI:",
                f"# '{ex.url}'",
            ],
        )
    except BadCommand:
        # The VCS executable (e.g. git) is not available on PATH.
        logger.warning(
            "cannot determine version of editable source in %s "
            "(%s command not found in path)",
            location,
            vcs_backend.name,
        )
        return _EditableInfo(requirement=location, comments=[])
    except InstallationError as exc:
        logger.warning("Error when trying to get requirement for VCS system %s", exc)
    else:
        return _EditableInfo(requirement=req, comments=[])

    # Reached only when InstallationError was caught above (all other
    # branches return directly).
    logger.warning("Could not determine repository location of %s", location)

    return _EditableInfo(
        requirement=location,
        comments=["## !! Could not determine repository location"],
    )
218
+
219
+
220
class FrozenRequirement:
    """One requirement as rendered in ``pip freeze`` output."""

    def __init__(
        self,
        name: str,
        req: str,
        editable: bool,
        comments: Iterable[str] = (),
    ) -> None:
        self.name = name
        # Normalized form, used to match against installed distributions.
        self.canonical_name = canonicalize_name(name)
        self.req = req
        self.editable = editable
        # Comment lines emitted before the requirement line itself.
        self.comments = comments

    @classmethod
    def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
        """Build a FrozenRequirement describing installed distribution *dist*."""
        editable = dist.editable
        if editable:
            req, comments = _get_editable_info(dist)
        else:
            comments = []
            direct_url = dist.direct_url
            if direct_url:
                # if PEP 610 metadata is present, use it
                req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name)
            else:
                # name==version requirement
                req = _format_as_name_version(dist)

        return cls(dist.raw_name, req, editable, comments=comments)

    def __str__(self) -> str:
        req = self.req
        if self.editable:
            # Editable installs are rendered with the "-e" flag.
            req = f"-e {req}"
        return "\n".join(list(self.comments) + [str(req)]) + "\n"
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/prepare.py ADDED
@@ -0,0 +1,730 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Prepares a distribution for installation
2
+ """
3
+
4
+ # The following comment should be removed at some point in the future.
5
+ # mypy: strict-optional=False
6
+
7
+ import mimetypes
8
+ import os
9
+ import shutil
10
+ from pathlib import Path
11
+ from typing import Dict, Iterable, List, Optional
12
+
13
+ from pip._vendor.packaging.utils import canonicalize_name
14
+
15
+ from pip._internal.distributions import make_distribution_for_install_requirement
16
+ from pip._internal.distributions.installed import InstalledDistribution
17
+ from pip._internal.exceptions import (
18
+ DirectoryUrlHashUnsupported,
19
+ HashMismatch,
20
+ HashUnpinned,
21
+ InstallationError,
22
+ MetadataInconsistent,
23
+ NetworkConnectionError,
24
+ VcsHashUnsupported,
25
+ )
26
+ from pip._internal.index.package_finder import PackageFinder
27
+ from pip._internal.metadata import BaseDistribution, get_metadata_distribution
28
+ from pip._internal.models.direct_url import ArchiveInfo
29
+ from pip._internal.models.link import Link
30
+ from pip._internal.models.wheel import Wheel
31
+ from pip._internal.network.download import BatchDownloader, Downloader
32
+ from pip._internal.network.lazy_wheel import (
33
+ HTTPRangeRequestUnsupported,
34
+ dist_from_wheel_url,
35
+ )
36
+ from pip._internal.network.session import PipSession
37
+ from pip._internal.operations.build.build_tracker import BuildTracker
38
+ from pip._internal.req.req_install import InstallRequirement
39
+ from pip._internal.utils._log import getLogger
40
+ from pip._internal.utils.direct_url_helpers import (
41
+ direct_url_for_editable,
42
+ direct_url_from_link,
43
+ )
44
+ from pip._internal.utils.hashes import Hashes, MissingHashes
45
+ from pip._internal.utils.logging import indent_log
46
+ from pip._internal.utils.misc import (
47
+ display_path,
48
+ hash_file,
49
+ hide_url,
50
+ redact_auth_from_requirement,
51
+ )
52
+ from pip._internal.utils.temp_dir import TempDirectory
53
+ from pip._internal.utils.unpacking import unpack_file
54
+ from pip._internal.vcs import vcs
55
+
56
+ logger = getLogger(__name__)
57
+
58
+
59
def _get_prepared_distribution(
    req: InstallRequirement,
    build_tracker: BuildTracker,
    finder: PackageFinder,
    build_isolation: bool,
    check_build_deps: bool,
) -> BaseDistribution:
    """Prepare a distribution for installation."""
    abstract_dist = make_distribution_for_install_requirement(req)
    tracker_id = abstract_dist.build_tracker_id
    if tracker_id is None:
        # No build step to register with the tracker.
        return abstract_dist.get_metadata_distribution()
    with build_tracker.track(req, tracker_id):
        abstract_dist.prepare_distribution_metadata(
            finder, build_isolation, check_build_deps
        )
    return abstract_dist.get_metadata_distribution()
75
+
76
+
77
def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
    """Check the VCS *link* out into *location*."""
    backend = vcs.get_backend_for_scheme(link.scheme)
    assert backend is not None
    backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)
81
+
82
+
83
class File:
    """A local file path paired with its (possibly guessed) content type."""

    def __init__(self, path: str, content_type: Optional[str]) -> None:
        self.path = path
        # Fall back to guessing from the filename when no type was supplied.
        self.content_type = (
            mimetypes.guess_type(path)[0] if content_type is None else content_type
        )
90
+
91
+
92
def get_http_url(
    link: Link,
    download: Downloader,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> File:
    """Fetch *link* over HTTP, or reuse a valid previously downloaded copy."""
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)

    # Reuse an existing, hash-valid file from download_dir when possible.
    already_downloaded_path = (
        _check_download_dir(link, download_dir, hashes) if download_dir else None
    )
    if already_downloaded_path:
        return File(already_downloaded_path, None)

    # Otherwise download into the globally managed temporary directory.
    from_path, content_type = download(link, temp_dir.path)
    if hashes:
        hashes.check_against_path(from_path)
    return File(from_path, content_type)
114
+
115
+
116
def get_file_url(
    link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None
) -> File:
    """Get file and optionally check its hash."""
    # Prefer a previously downloaded (and already hash-checked) copy.
    already_downloaded_path = (
        _check_download_dir(link, download_dir, hashes) if download_dir else None
    )
    from_path = already_downloaded_path or link.file_path

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(from_path)
    return File(from_path, None)
138
+
139
+
140
def unpack_url(
    link: Link,
    location: str,
    download: Downloader,
    verbosity: int,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> Optional[File]:
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # VCS URLs are checked out in place; there is no archive file to return.
    if link.is_vcs:
        unpack_vcs_link(link, location, verbosity=verbosity)
        return None

    assert not link.is_existing_dir()

    # Obtain the archive: from disk for file: URLs, otherwise over HTTP.
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)
    else:
        file = get_http_url(link, download, download_dir, hashes=hashes)

    # Archives must be unpacked so their dependencies can be parsed;
    # wheels are the exception and are consumed packed.
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
181
+
182
+
183
def _check_download_dir(
    link: Link,
    download_dir: str,
    hashes: Optional[Hashes],
    warn_on_hash_mismatch: bool = True,
) -> Optional[str]:
    """Return the path of a previously downloaded, hash-valid copy of *link*
    inside *download_dir*, or None when no usable copy exists.
    """
    candidate = os.path.join(download_dir, link.filename)

    if not os.path.exists(candidate):
        return None

    logger.info("File was already downloaded %s", candidate)
    if hashes:
        try:
            hashes.check_against_path(candidate)
        except HashMismatch:
            # Discard the stale/corrupt file so it gets re-downloaded.
            if warn_on_hash_mismatch:
                logger.warning(
                    "Previously-downloaded file %s has bad hash. Re-downloading.",
                    candidate,
                )
            os.unlink(candidate)
            return None
    return candidate
211
+
212
+
213
+ class RequirementPreparer:
214
+ """Prepares a Requirement"""
215
+
216
    def __init__(
        self,
        build_dir: str,
        download_dir: Optional[str],
        src_dir: str,
        build_isolation: bool,
        check_build_deps: bool,
        build_tracker: BuildTracker,
        session: PipSession,
        progress_bar: str,
        finder: PackageFinder,
        require_hashes: bool,
        use_user_site: bool,
        lazy_wheel: bool,
        verbosity: int,
        legacy_resolver: bool,
    ) -> None:
        """Store preparation settings and construct the download helpers."""
        super().__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir
        self.build_tracker = build_tracker
        self._session = session
        # Single-link and batch downloaders share the same session/progress bar.
        self._download = Downloader(session, progress_bar)
        self._batch_download = BatchDownloader(session, progress_bar)
        self.finder = finder

        # Where still-packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Is build isolation allowed?
        self.build_isolation = build_isolation

        # Should check build dependencies?
        self.check_build_deps = check_build_deps

        # Should hash-checking be required?
        self.require_hashes = require_hashes

        # Should install in user site-packages?
        self.use_user_site = use_user_site

        # Should wheels be downloaded lazily?
        self.use_lazy_wheel = lazy_wheel

        # How verbose should underlying tooling be?
        self.verbosity = verbosity

        # Are we using the legacy resolver?
        self.legacy_resolver = legacy_resolver

        # Memoized downloaded files, as mapping of url: path.
        self._downloaded: Dict[str, str] = {}

        # Previous "header" printed for a link-based InstallRequirement
        self._previous_requirement_header = ("", "")
273
+
274
    def _log_preparing_link(self, req: InstallRequirement) -> None:
        """Provide context for the requirement being prepared."""
        if req.link.is_file and not req.is_wheel_from_cache:
            message = "Processing %s"
            information = str(display_path(req.link.file_path))
        else:
            message = "Collecting %s"
            # Redact credentials that may be embedded in the requirement URL.
            information = redact_auth_from_requirement(req.req) if req.req else str(req)

        # If we used req.req, inject requirement source if available (this
        # would already be included if we used req directly)
        if req.req and req.comes_from:
            if isinstance(req.comes_from, str):
                comes_from: Optional[str] = req.comes_from
            else:
                comes_from = req.comes_from.from_path()
            if comes_from:
                information += f" (from {comes_from})"

        # Avoid logging the identical header twice in a row.
        if (message, information) != self._previous_requirement_header:
            self._previous_requirement_header = (message, information)
            logger.info(message, information)

        if req.is_wheel_from_cache:
            with indent_log():
                logger.info("Using cached %s", req.link.filename)
300
+
301
    def _ensure_link_req_src_dir(
        self, req: InstallRequirement, parallel_builds: bool
    ) -> None:
        """Ensure source_dir of a linked InstallRequirement.

        :param parallel_builds: forwarded to ``req.ensure_has_source_dir``.
        """
        # Since source_dir is only set for editable requirements.
        if req.link.is_wheel:
            # We don't need to unpack wheels, so no need for a source
            # directory.
            return
        assert req.source_dir is None
        if req.link.is_existing_dir():
            # build local directories in-tree
            req.source_dir = req.link.file_path
            return

        # We always delete unpacked sdists after pip runs.
        req.ensure_has_source_dir(
            self.build_dir,
            autodelete=True,
            parallel_builds=parallel_builds,
        )
        req.ensure_pristine_source_checkout()
323
+
324
    def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
        """Return the hashes that *req*'s artifact must match.

        Raises VcsHashUnsupported / DirectoryUrlHashUnsupported / HashUnpinned
        when --require-hashes is on and the link cannot be hash-checked.
        """
        # By the time this is called, the requirement's link should have
        # been checked so we can tell what kind of requirements req is
        # and raise some more informative errors than otherwise.
        # (For example, we can raise VcsHashUnsupported for a VCS URL
        # rather than HashMissing.)
        if not self.require_hashes:
            return req.hashes(trust_internet=True)

        # We could check these first 2 conditions inside unpack_url
        # and save repetition of conditions, but then we would
        # report less-useful error messages for unhashable
        # requirements, complaining that there's no hash provided.
        if req.link.is_vcs:
            raise VcsHashUnsupported()
        if req.link.is_existing_dir():
            raise DirectoryUrlHashUnsupported()

        # Unpinned packages are asking for trouble when a new version
        # is uploaded. This isn't a security check, but it saves users
        # a surprising hash mismatch in the future.
        # file:/// URLs aren't pinnable, so don't complain about them
        # not being pinned.
        if not req.is_direct and not req.is_pinned:
            raise HashUnpinned()

        # If known-good hashes are missing for this requirement,
        # shim it with a facade object that will provoke hash
        # computation and then raise a HashMissing exception
        # showing the user what the hash should be.
        return req.hashes(trust_internet=False) or MissingHashes()
355
+
356
+ def _fetch_metadata_only(
357
+ self,
358
+ req: InstallRequirement,
359
+ ) -> Optional[BaseDistribution]:
360
+ if self.legacy_resolver:
361
+ logger.debug(
362
+ "Metadata-only fetching is not used in the legacy resolver",
363
+ )
364
+ return None
365
+ if self.require_hashes:
366
+ logger.debug(
367
+ "Metadata-only fetching is not used as hash checking is required",
368
+ )
369
+ return None
370
+ # Try PEP 658 metadata first, then fall back to lazy wheel if unavailable.
371
+ return self._fetch_metadata_using_link_data_attr(
372
+ req
373
+ ) or self._fetch_metadata_using_lazy_wheel(req.link)
374
+
375
    def _fetch_metadata_using_link_data_attr(
        self,
        req: InstallRequirement,
    ) -> Optional[BaseDistribution]:
        """Fetch metadata from the data-dist-info-metadata attribute, if possible.

        Raises MetadataInconsistent when the fetched metadata's Name does not
        match the requirement's name.
        """
        # (1) Get the link to the metadata file, if provided by the backend.
        metadata_link = req.link.metadata_link()
        if metadata_link is None:
            return None
        assert req.req is not None
        logger.verbose(
            "Obtaining dependency information for %s from %s",
            req.req,
            metadata_link,
        )
        # (2) Download the contents of the METADATA file, separate from the dist itself.
        metadata_file = get_http_url(
            metadata_link,
            self._download,
            hashes=metadata_link.as_hashes(),
        )
        with open(metadata_file.path, "rb") as f:
            metadata_contents = f.read()
        # (3) Generate a dist just from those file contents.
        metadata_dist = get_metadata_distribution(
            metadata_contents,
            req.link.filename,
            req.req.name,
        )
        # (4) Ensure the Name: field from the METADATA file matches the name from the
        # install requirement.
        #
        # NB: raw_name will fall back to the name from the install requirement if
        # the Name: field is not present, but it's noted in the raw_name docstring
        # that that should NEVER happen anyway.
        if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name):
            raise MetadataInconsistent(
                req, "Name", req.req.name, metadata_dist.raw_name
            )
        return metadata_dist
415
+
416
+ def _fetch_metadata_using_lazy_wheel(
417
+ self,
418
+ link: Link,
419
+ ) -> Optional[BaseDistribution]:
420
+ """Fetch metadata using lazy wheel, if possible."""
421
+ # --use-feature=fast-deps must be provided.
422
+ if not self.use_lazy_wheel:
423
+ return None
424
+ if link.is_file or not link.is_wheel:
425
+ logger.debug(
426
+ "Lazy wheel is not used as %r does not point to a remote wheel",
427
+ link,
428
+ )
429
+ return None
430
+
431
+ wheel = Wheel(link.filename)
432
+ name = canonicalize_name(wheel.name)
433
+ logger.info(
434
+ "Obtaining dependency information from %s %s",
435
+ name,
436
+ wheel.version,
437
+ )
438
+ url = link.url.split("#", 1)[0]
439
+ try:
440
+ return dist_from_wheel_url(name, url, self._session)
441
+ except HTTPRangeRequestUnsupported:
442
+ logger.debug("%s does not support range requests", url)
443
+ return None
444
+
445
+ def _complete_partial_requirements(
446
+ self,
447
+ partially_downloaded_reqs: Iterable[InstallRequirement],
448
+ parallel_builds: bool = False,
449
+ ) -> None:
450
+ """Download any requirements which were only fetched by metadata."""
451
+ # Download to a temporary directory. These will be copied over as
452
+ # needed for downstream 'download', 'wheel', and 'install' commands.
453
+ temp_dir = TempDirectory(kind="unpack", globally_managed=True).path
454
+
455
+ # Map each link to the requirement that owns it. This allows us to set
456
+ # `req.local_file_path` on the appropriate requirement after passing
457
+ # all the links at once into BatchDownloader.
458
+ links_to_fully_download: Dict[Link, InstallRequirement] = {}
459
+ for req in partially_downloaded_reqs:
460
+ assert req.link
461
+ links_to_fully_download[req.link] = req
462
+
463
+ batch_download = self._batch_download(
464
+ links_to_fully_download.keys(),
465
+ temp_dir,
466
+ )
467
+ for link, (filepath, _) in batch_download:
468
+ logger.debug("Downloading link %s to %s", link, filepath)
469
+ req = links_to_fully_download[link]
470
+ # Record the downloaded file path so wheel reqs can extract a Distribution
471
+ # in .get_dist().
472
+ req.local_file_path = filepath
473
+ # Record that the file is downloaded so we don't do it again in
474
+ # _prepare_linked_requirement().
475
+ self._downloaded[req.link.url] = filepath
476
+
477
+ # If this is an sdist, we need to unpack it after downloading, but the
478
+ # .source_dir won't be set up until we are in _prepare_linked_requirement().
479
+ # Add the downloaded archive to the install requirement to unpack after
480
+ # preparing the source dir.
481
+ if not req.is_wheel:
482
+ req.needs_unpacked_archive(Path(filepath))
483
+
484
+ # This step is necessary to ensure all lazy wheels are processed
485
+ # successfully by the 'download', 'wheel', and 'install' commands.
486
+ for req in partially_downloaded_reqs:
487
+ self._prepare_linked_requirement(req, parallel_builds)
488
+
489
    def prepare_linked_requirement(
        self, req: InstallRequirement, parallel_builds: bool = False
    ) -> BaseDistribution:
        """Prepare a requirement to be obtained from req.link.

        Tries two shortcuts before doing a full preparation: reusing a
        wheel already present in the download directory, and (failing
        that) fetching only the metadata. A metadata-only result marks
        the requirement for later completion via
        prepare_linked_requirements_more().
        """
        assert req.link
        self._log_preparing_link(req)
        with indent_log():
            # Check if the relevant file is already available
            # in the download directory
            file_path = None
            if self.download_dir is not None and req.link.is_wheel:
                hashes = self._get_linked_req_hashes(req)
                file_path = _check_download_dir(
                    req.link,
                    self.download_dir,
                    hashes,
                    # When a locally built wheel has been found in cache, we don't warn
                    # about re-downloading when the already downloaded wheel hash does
                    # not match. This is because the hash must be checked against the
                    # original link, not the cached link. It that case the already
                    # downloaded file will be removed and re-fetched from cache (which
                    # implies a hash check against the cache entry's origin.json).
                    warn_on_hash_mismatch=not req.is_wheel_from_cache,
                )

            if file_path is not None:
                # The file is already available, so mark it as downloaded
                self._downloaded[req.link.url] = file_path
            else:
                # The file is not available, attempt to fetch only metadata
                metadata_dist = self._fetch_metadata_only(req)
                if metadata_dist is not None:
                    # Defer the actual download; see
                    # _complete_partial_requirements().
                    req.needs_more_preparation = True
                    return metadata_dist

        # None of the optimizations worked, fully prepare the requirement
        return self._prepare_linked_requirement(req, parallel_builds)
526
+
527
+ def prepare_linked_requirements_more(
528
+ self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
529
+ ) -> None:
530
+ """Prepare linked requirements more, if needed."""
531
+ reqs = [req for req in reqs if req.needs_more_preparation]
532
+ for req in reqs:
533
+ # Determine if any of these requirements were already downloaded.
534
+ if self.download_dir is not None and req.link.is_wheel:
535
+ hashes = self._get_linked_req_hashes(req)
536
+ file_path = _check_download_dir(req.link, self.download_dir, hashes)
537
+ if file_path is not None:
538
+ self._downloaded[req.link.url] = file_path
539
+ req.needs_more_preparation = False
540
+
541
+ # Prepare requirements we found were already downloaded for some
542
+ # reason. The other downloads will be completed separately.
543
+ partially_downloaded_reqs: List[InstallRequirement] = []
544
+ for req in reqs:
545
+ if req.needs_more_preparation:
546
+ partially_downloaded_reqs.append(req)
547
+ else:
548
+ self._prepare_linked_requirement(req, parallel_builds)
549
+
550
+ # TODO: separate this part out from RequirementPreparer when the v1
551
+ # resolver can be removed!
552
+ self._complete_partial_requirements(
553
+ partially_downloaded_reqs,
554
+ parallel_builds=parallel_builds,
555
+ )
556
+
557
    def _prepare_linked_requirement(
        self, req: InstallRequirement, parallel_builds: bool
    ) -> BaseDistribution:
        """Fully prepare a linked requirement: fetch, verify, and build a dist.

        Handles the cached-built-wheel hash verification, downloads (or
        reuses) the artifact, records download_info, and hands off to
        _get_prepared_distribution. The statement order here matters:
        hashes may be cleared or the link swapped back to the original
        source before any download happens.
        """
        assert req.link
        link = req.link

        hashes = self._get_linked_req_hashes(req)

        if hashes and req.is_wheel_from_cache:
            assert req.download_info is not None
            assert link.is_wheel
            assert link.is_file
            # We need to verify hashes, and we have found the requirement in the cache
            # of locally built wheels.
            if (
                isinstance(req.download_info.info, ArchiveInfo)
                and req.download_info.info.hashes
                and hashes.has_one_of(req.download_info.info.hashes)
            ):
                # At this point we know the requirement was built from a hashable source
                # artifact, and we verified that the cache entry's hash of the original
                # artifact matches one of the hashes we expect. We don't verify hashes
                # against the cached wheel, because the wheel is not the original.
                hashes = None
            else:
                logger.warning(
                    "The hashes of the source archive found in cache entry "
                    "don't match, ignoring cached built wheel "
                    "and re-downloading source."
                )
                # Fall back to the original source link; the download below
                # will then be hash-checked normally.
                req.link = req.cached_wheel_source_link
                link = req.link

        self._ensure_link_req_src_dir(req, parallel_builds)

        if link.is_existing_dir():
            local_file = None
        elif link.url not in self._downloaded:
            try:
                local_file = unpack_url(
                    link,
                    req.source_dir,
                    self._download,
                    self.verbosity,
                    self.download_dir,
                    hashes,
                )
            except NetworkConnectionError as exc:
                raise InstallationError(
                    f"Could not install requirement {req} because of HTTP "
                    f"error {exc} for URL {link}"
                )
        else:
            file_path = self._downloaded[link.url]
            if hashes:
                hashes.check_against_path(file_path)
            local_file = File(file_path, content_type=None)

        # If download_info is set, we got it from the wheel cache.
        if req.download_info is None:
            # Editables don't go through this function (see
            # prepare_editable_requirement).
            assert not req.editable
            req.download_info = direct_url_from_link(link, req.source_dir)
            # Make sure we have a hash in download_info. If we got it as part of the
            # URL, it will have been verified and we can rely on it. Otherwise we
            # compute it from the downloaded file.
            # FIXME: https://github.com/pypa/pip/issues/11943
            if (
                isinstance(req.download_info.info, ArchiveInfo)
                and not req.download_info.info.hashes
                and local_file
            ):
                hash = hash_file(local_file.path)[0].hexdigest()
                # We populate info.hash for backward compatibility.
                # This will automatically populate info.hashes.
                req.download_info.info.hash = f"sha256={hash}"

        # For use in later processing,
        # preserve the file path on the requirement.
        if local_file:
            req.local_file_path = local_file.path

        dist = _get_prepared_distribution(
            req,
            self.build_tracker,
            self.finder,
            self.build_isolation,
            self.check_build_deps,
        )
        return dist
648
+
649
+ def save_linked_requirement(self, req: InstallRequirement) -> None:
650
+ assert self.download_dir is not None
651
+ assert req.link is not None
652
+ link = req.link
653
+ if link.is_vcs or (link.is_existing_dir() and req.editable):
654
+ # Make a .zip of the source_dir we already created.
655
+ req.archive(self.download_dir)
656
+ return
657
+
658
+ if link.is_existing_dir():
659
+ logger.debug(
660
+ "Not copying link to destination directory "
661
+ "since it is a directory: %s",
662
+ link,
663
+ )
664
+ return
665
+ if req.local_file_path is None:
666
+ # No distribution was downloaded for this requirement.
667
+ return
668
+
669
+ download_location = os.path.join(self.download_dir, link.filename)
670
+ if not os.path.exists(download_location):
671
+ shutil.copy(req.local_file_path, download_location)
672
+ download_path = display_path(download_location)
673
+ logger.info("Saved %s", download_path)
674
+
675
+ def prepare_editable_requirement(
676
+ self,
677
+ req: InstallRequirement,
678
+ ) -> BaseDistribution:
679
+ """Prepare an editable requirement."""
680
+ assert req.editable, "cannot prepare a non-editable req as editable"
681
+
682
+ logger.info("Obtaining %s", req)
683
+
684
+ with indent_log():
685
+ if self.require_hashes:
686
+ raise InstallationError(
687
+ f"The editable requirement {req} cannot be installed when "
688
+ "requiring hashes, because there is no single file to "
689
+ "hash."
690
+ )
691
+ req.ensure_has_source_dir(self.src_dir)
692
+ req.update_editable()
693
+ assert req.source_dir
694
+ req.download_info = direct_url_for_editable(req.unpacked_source_directory)
695
+
696
+ dist = _get_prepared_distribution(
697
+ req,
698
+ self.build_tracker,
699
+ self.finder,
700
+ self.build_isolation,
701
+ self.check_build_deps,
702
+ )
703
+
704
+ req.check_if_exists(self.use_user_site)
705
+
706
+ return dist
707
+
708
    def prepare_installed_requirement(
        self,
        req: InstallRequirement,
        skip_reason: str,
    ) -> BaseDistribution:
        """Prepare an already-installed requirement.

        No fetching or building happens: the distribution is taken from
        the existing installation recorded in req.satisfied_by.
        """
        assert req.satisfied_by, "req should have been satisfied but isn't"
        assert skip_reason is not None, (
            "did not get skip reason skipped but req.satisfied_by "
            f"is set to {req.satisfied_by}"
        )
        logger.info(
            "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
        )
        with indent_log():
            if self.require_hashes:
                # An installed package cannot be hash-checked; trust it but
                # explain how to get full reproducibility.
                logger.debug(
                    "Since it is already installed, we are trusting this "
                    "package without checking its hash. To ensure a "
                    "completely repeatable environment, install into an "
                    "empty virtualenv."
                )
            return InstalledDistribution(req).get_metadata_distribution()
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/req/req_file.py ADDED
@@ -0,0 +1,554 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Requirements file parsing
3
+ """
4
+
5
+ import logging
6
+ import optparse
7
+ import os
8
+ import re
9
+ import shlex
10
+ import urllib.parse
11
+ from optparse import Values
12
+ from typing import (
13
+ TYPE_CHECKING,
14
+ Any,
15
+ Callable,
16
+ Dict,
17
+ Generator,
18
+ Iterable,
19
+ List,
20
+ Optional,
21
+ Tuple,
22
+ )
23
+
24
+ from pip._internal.cli import cmdoptions
25
+ from pip._internal.exceptions import InstallationError, RequirementsFileParseError
26
+ from pip._internal.models.search_scope import SearchScope
27
+ from pip._internal.network.session import PipSession
28
+ from pip._internal.network.utils import raise_for_status
29
+ from pip._internal.utils.encoding import auto_decode
30
+ from pip._internal.utils.urls import get_url_scheme
31
+
32
+ if TYPE_CHECKING:
33
+ # NoReturn introduced in 3.6.2; imported only for type checking to maintain
34
+ # pip compatibility with older patch versions of Python 3.6
35
+ from typing import NoReturn
36
+
37
+ from pip._internal.index.package_finder import PackageFinder
38
+
39
+ __all__ = ["parse_requirements"]
40
+
41
+ ReqFileLines = Iterable[Tuple[int, str]]
42
+
43
+ LineParser = Callable[[str], Tuple[str, Values]]
44
+
45
+ SCHEME_RE = re.compile(r"^(http|https|file):", re.I)
46
+ COMMENT_RE = re.compile(r"(^|\s+)#.*$")
47
+
48
+ # Matches environment variable-style values in '${MY_VARIABLE_1}' with the
49
+ # variable name consisting of only uppercase letters, digits or the '_'
50
+ # (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
51
+ # 2013 Edition.
52
+ ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")
53
+
54
+ SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
55
+ cmdoptions.index_url,
56
+ cmdoptions.extra_index_url,
57
+ cmdoptions.no_index,
58
+ cmdoptions.constraints,
59
+ cmdoptions.requirements,
60
+ cmdoptions.editable,
61
+ cmdoptions.find_links,
62
+ cmdoptions.no_binary,
63
+ cmdoptions.only_binary,
64
+ cmdoptions.prefer_binary,
65
+ cmdoptions.require_hashes,
66
+ cmdoptions.pre,
67
+ cmdoptions.trusted_host,
68
+ cmdoptions.use_new_feature,
69
+ ]
70
+
71
+ # options to be passed to requirements
72
+ SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
73
+ cmdoptions.global_options,
74
+ cmdoptions.hash,
75
+ cmdoptions.config_settings,
76
+ ]
77
+
78
+ SUPPORTED_OPTIONS_EDITABLE_REQ: List[Callable[..., optparse.Option]] = [
79
+ cmdoptions.config_settings,
80
+ ]
81
+
82
+
83
+ # the 'dest' string values
84
+ SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
85
+ SUPPORTED_OPTIONS_EDITABLE_REQ_DEST = [
86
+ str(o().dest) for o in SUPPORTED_OPTIONS_EDITABLE_REQ
87
+ ]
88
+
89
+ logger = logging.getLogger(__name__)
90
+
91
+
92
class ParsedRequirement:
    """A single requirement parsed out of a requirements/constraints file."""

    def __init__(
        self,
        requirement: str,
        is_editable: bool,
        comes_from: str,
        constraint: bool,
        options: Optional[Dict[str, Any]] = None,
        line_source: Optional[str] = None,
    ) -> None:
        # The requirement specifier text exactly as written on the line.
        self.requirement = requirement
        self.is_editable = is_editable
        # Human-readable provenance, e.g. "-r reqs.txt (line 3)".
        self.comes_from = comes_from
        self.constraint = constraint
        # Per-requirement options (hashes, global-options, ...), if any.
        self.options = options
        # "line N of FILE" string used in error messages.
        self.line_source = line_source
108
+
109
+
110
class ParsedLine:
    """One logical requirements-file line, split into args and options."""

    def __init__(
        self,
        filename: str,
        lineno: int,
        args: str,
        opts: Values,
        constraint: bool,
    ) -> None:
        self.filename = filename
        self.lineno = lineno
        self.opts = opts
        self.constraint = constraint

        if args:
            # Plain requirement text (everything before the options).
            self.is_requirement = True
            self.is_editable = False
            self.requirement = args
            return
        if opts.editables:
            # An editable supplied via -e/--editable. Multiple -e on one
            # line are not supported, so only the first is used.
            self.is_requirement = True
            self.is_editable = True
            self.requirement = opts.editables[0]
            return
        # An options-only line (e.g. --index-url ...).
        self.is_requirement = False
135
+
136
+
137
def parse_requirements(
    filename: str,
    session: PipSession,
    finder: Optional["PackageFinder"] = None,
    options: Optional[optparse.Values] = None,
    constraint: bool = False,
) -> Generator[ParsedRequirement, None, None]:
    """Parse a requirements file and yield ParsedRequirement instances.

    :param filename: Path or url of requirements file.
    :param session: PipSession instance.
    :param finder: Instance of pip.index.PackageFinder.
    :param options: cli options.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    """
    parser = RequirementsFileParser(session, get_line_parser(finder))

    # Options-only lines update finder/session state and yield nothing;
    # handle_line returns None for them.
    for parsed_line in parser.parse(filename, constraint):
        parsed_req = handle_line(
            parsed_line, options=options, finder=finder, session=session
        )
        if parsed_req is not None:
            yield parsed_req
162
+
163
+
164
def preprocess(content: str) -> ReqFileLines:
    """Split, filter, and join lines, and return a line iterator

    :param content: the content of the requirements file
    """
    # Pipeline: numbered raw lines -> backslash-continuation joining ->
    # comment stripping -> ${VAR} expansion. Each stage preserves the
    # (line_number, text) pairing.
    numbered: ReqFileLines = enumerate(content.splitlines(), start=1)
    return expand_env_variables(ignore_comments(join_lines(numbered)))
174
+
175
+
176
def handle_requirement_line(
    line: ParsedLine,
    options: Optional[optparse.Values] = None,
) -> ParsedRequirement:
    """Convert a requirement-bearing ParsedLine into a ParsedRequirement."""
    # preserve for the nested code path
    line_comes_from = "{} {} (line {})".format(
        "-c" if line.constraint else "-r",
        line.filename,
        line.lineno,
    )

    assert line.is_requirement

    # Only a subset of options may be attached to an individual
    # requirement; editables accept an even narrower set.
    if line.is_editable:
        supported_dest = SUPPORTED_OPTIONS_EDITABLE_REQ_DEST
    else:
        supported_dest = SUPPORTED_OPTIONS_REQ_DEST
    req_options = {
        dest: line.opts.__dict__[dest]
        for dest in supported_dest
        if dest in line.opts.__dict__ and line.opts.__dict__[dest]
    }

    return ParsedRequirement(
        requirement=line.requirement,
        is_editable=line.is_editable,
        comes_from=line_comes_from,
        constraint=line.constraint,
        options=req_options,
        line_source=f"line {line.lineno} of {line.filename}",
    )
208
+
209
+
210
def handle_option_line(
    opts: Values,
    filename: str,
    lineno: int,
    finder: Optional["PackageFinder"] = None,
    options: Optional[optparse.Values] = None,
    session: Optional["PipSession"] = None,
) -> None:
    """Apply a non-requirement (options-only) line to global state.

    Percolates selected options up into the CLI ``options``, updates the
    ``finder``'s search scope and flags, and registers index URLs and
    trusted hosts on the ``session``.

    :param opts: Parsed options for this single line.
    :param filename: Requirements file the line came from (used in messages).
    :param lineno: 1-based line number within that file.
    """
    if opts.hashes:
        logger.warning(
            "%s line %s has --hash but no requirement, and will be ignored.",
            filename,
            lineno,
        )

    if options:
        # percolate options upward
        if opts.require_hashes:
            options.require_hashes = opts.require_hashes
        if opts.features_enabled:
            options.features_enabled.extend(
                f for f in opts.features_enabled if f not in options.features_enabled
            )

    # set finder options
    if finder:
        find_links = finder.find_links
        index_urls = finder.index_urls
        no_index = finder.search_scope.no_index
        if opts.no_index is True:
            no_index = True
            index_urls = []
        if opts.index_url and not no_index:
            index_urls = [opts.index_url]
        if opts.extra_index_urls and not no_index:
            index_urls.extend(opts.extra_index_urls)
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            find_links.append(value)

        if session:
            # We need to update the auth urls in session
            session.update_index_urls(index_urls)

        search_scope = SearchScope(
            find_links=find_links,
            index_urls=index_urls,
            no_index=no_index,
        )
        finder.search_scope = search_scope

        if opts.pre:
            finder.set_allow_all_prereleases()

        if opts.prefer_binary:
            finder.set_prefer_binary()

    if session:
        for host in opts.trusted_hosts or []:
            # Bug fix: this message previously read "of (unknown)" instead
            # of naming the requirements file the host came from.
            source = f"line {lineno} of {filename}"
            session.add_trusted_host(host, source=source)
278
+
279
+
280
def handle_line(
    line: ParsedLine,
    options: Optional[optparse.Values] = None,
    finder: Optional["PackageFinder"] = None,
    session: Optional[PipSession] = None,
) -> Optional[ParsedRequirement]:
    """Handle a single parsed requirements line; This can result in
    creating/yielding requirements, or updating the finder.

    :param line: The parsed line to be processed.
    :param options: CLI options.
    :param finder: The finder - updated by non-requirement lines.
    :param session: The session - updated by non-requirement lines.

    Returns a ParsedRequirement object if the line is a requirement line,
    otherwise returns None.

    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.

    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all our parsed and
    affect the finder.
    """
    if not line.is_requirement:
        # Options-only line: mutate finder/session/options state instead
        # of producing a requirement.
        handle_option_line(
            line.opts,
            line.filename,
            line.lineno,
            finder,
            options,
            session,
        )
        return None
    return handle_requirement_line(line, options)
322
+
323
+
324
class RequirementsFileParser:
    """Parses requirements files, recursing into -r/-c references."""

    def __init__(
        self,
        session: PipSession,
        line_parser: LineParser,
    ) -> None:
        self._session = session
        self._line_parser = line_parser

    def parse(
        self, filename: str, constraint: bool
    ) -> Generator[ParsedLine, None, None]:
        """Parse a given file, yielding parsed lines."""
        yield from self._parse_and_recurse(filename, constraint)

    def _parse_and_recurse(
        self, filename: str, constraint: bool
    ) -> Generator[ParsedLine, None, None]:
        # Yield ordinary lines as-is; -r/-c option lines trigger recursion
        # into the referenced file instead of being yielded.
        for parsed in self._parse_file(filename, constraint):
            references_file = not parsed.is_requirement and (
                parsed.opts.requirements or parsed.opts.constraints
            )
            if not references_file:
                yield parsed
                continue

            if parsed.opts.requirements:
                nested_path = parsed.opts.requirements[0]
                nested_constraint = False
            else:
                nested_path = parsed.opts.constraints[0]
                nested_constraint = True

            if SCHEME_RE.search(filename):
                # The current file lives at a URL: resolve the reference
                # with a URL join so relative paths work.
                nested_path = urllib.parse.urljoin(filename, nested_path)
            elif not SCHEME_RE.search(nested_path):
                # Both are filesystem paths: resolve relative to this file.
                nested_path = os.path.join(
                    os.path.dirname(filename),
                    nested_path,
                )

            yield from self._parse_and_recurse(nested_path, nested_constraint)

    def _parse_file(
        self, filename: str, constraint: bool
    ) -> Generator[ParsedLine, None, None]:
        _, content = get_file_content(filename, self._session)

        for line_number, text in preprocess(content):
            try:
                args_str, opts = self._line_parser(text)
            except OptionParsingError as e:
                # Surface the offending line alongside the parser's message.
                raise RequirementsFileParseError(
                    f"Invalid requirement: {text}\n{e.msg}"
                )

            yield ParsedLine(
                filename,
                line_number,
                args_str,
                opts,
                constraint,
            )
392
+
393
+
394
def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser:
    """Return a callable that splits one line into (args, parsed options)."""

    def parse_line(line: str) -> Tuple[str, Values]:
        # A fresh parser per line: optparse accumulates appendable options,
        # so reusing one would leak state between lines.
        parser = build_parser()
        defaults = parser.get_default_values()
        defaults.index_url = None
        if finder:
            defaults.format_control = finder.format_control

        args_str, options_str = break_args_options(line)

        try:
            option_tokens = shlex.split(options_str)
        except ValueError as e:
            raise OptionParsingError(f"Could not split options: {options_str}") from e

        opts, _ = parser.parse_args(option_tokens, defaults)
        return args_str, opts

    return parse_line
416
+
417
+
418
def break_args_options(line: str) -> Tuple[str, str]:
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.

    :return: ``(args, options)`` where *args* is everything before the first
        dash-prefixed token and *options* is the remainder of the line.
    """
    tokens = line.split(" ")
    args: List[str] = []
    for i, token in enumerate(tokens):
        # The first dash-prefixed token starts the options part. (The
        # original also tested startswith("--"), which is redundant:
        # anything starting with "--" already starts with "-".)
        if token.startswith("-"):
            return " ".join(args), " ".join(tokens[i:])
        args.append(token)
    return " ".join(args), ""
433
+
434
+
435
class OptionParsingError(Exception):
    """Raised when optparse fails to parse a requirements-file option line.

    Deliberately does not call ``Exception.__init__``; callers read ``msg``
    and wrap it into a RequirementsFileParseError.
    """

    def __init__(self, msg: str) -> None:
        self.msg = msg
438
+
439
+
440
def build_parser() -> optparse.OptionParser:
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)

    for make_option in SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ:
        parser.add_option(make_option())

    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self: Any, msg: str) -> "NoReturn":
        raise OptionParsingError(msg)

    # NOTE: mypy disallows assigning to a method
    # https://github.com/python/mypy/issues/2427
    parser.exit = parser_exit  # type: ignore

    return parser
461
+
462
+
463
def join_lines(lines_enum: ReqFileLines) -> ReqFileLines:
    """Joins a line ending in '\' with the previous line (except when following
    comments). The joined line takes on the index of the first line.

    Yields (line_number, text) pairs; a joined run is reported under the
    number of its first physical line.
    """
    primary_line_number = None
    new_line: List[str] = []
    for line_number, line in lines_enum:
        if not line.endswith("\\") or COMMENT_RE.match(line):
            if COMMENT_RE.match(line):
                # this ensures comments are always matched later
                line = " " + line
            if new_line:
                # Close out a pending continuation run with this final line.
                new_line.append(line)
                assert primary_line_number is not None
                yield primary_line_number, "".join(new_line)
                new_line = []
            else:
                yield line_number, line
        else:
            if not new_line:
                primary_line_number = line_number
            # NOTE: strip("\\") removes backslashes from BOTH ends of the
            # fragment, not just the trailing continuation marker.
            new_line.append(line.strip("\\"))

    # last line contains \
    if new_line:
        assert primary_line_number is not None
        yield primary_line_number, "".join(new_line)

    # TODO: handle space after '\'.
492
+
493
+
494
def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines:
    """
    Strips comments and filter empty lines.
    """
    for line_number, raw in lines_enum:
        # Drop the comment portion, then any surrounding whitespace; skip
        # lines that end up empty.
        stripped = COMMENT_RE.sub("", raw).strip()
        if stripped:
            yield line_number, stripped
503
+
504
+
505
def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines:
    """Replace all environment variables that can be retrieved via `os.getenv`.

    The only allowed format for environment variables defined in the
    requirement file is `${MY_VARIABLE_1}` to ensure two things:

    1. Strings that contain a `$` aren't accidentally (partially) expanded.
    2. Ensure consistency across platforms for requirement files.

    These points are the result of a discussion on the `github pull
    request #3514 <https://github.com/pypa/pip/pull/3514>`_.

    Valid characters in variable names follow the `POSIX standard
    <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
    to uppercase letter, digits and the `_` (underscore).
    """
    for line_number, line in lines_enum:
        for placeholder, var_name in ENV_VAR_RE.findall(line):
            env_value = os.getenv(var_name)
            # Unset or empty variables leave the literal ${NAME} in place.
            if env_value:
                line = line.replace(placeholder, env_value)
        yield line_number, line
530
+
531
+
532
def get_file_content(url: str, session: PipSession) -> Tuple[str, str]:
    """Gets the content of a file; it may be a filename, file: URL, or
    http: URL. Returns (location, content). Content is unicode.
    Respects # -*- coding: declarations on the retrieved files.

    :param url: File path or url.
    :param session: PipSession instance.
    :raises InstallationError: if a local path cannot be opened.
    """
    scheme = get_url_scheme(url)

    # Pip has special support for file:// URLs (LocalFSAdapter).
    if scheme in ["http", "https", "file"]:
        resp = session.get(url)
        raise_for_status(resp)
        return resp.url, resp.text

    # Assume this is a bare path.
    try:
        with open(url, "rb") as f:
            # auto_decode honors any "# -*- coding: ... -*-" declaration.
            content = auto_decode(f.read())
    except OSError as exc:
        raise InstallationError(f"Could not open requirements file: {exc}")
    return url, content
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/req/req_install.py ADDED
@@ -0,0 +1,923 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import logging
3
+ import os
4
+ import shutil
5
+ import sys
6
+ import uuid
7
+ import zipfile
8
+ from optparse import Values
9
+ from pathlib import Path
10
+ from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
11
+
12
+ from pip._vendor.packaging.markers import Marker
13
+ from pip._vendor.packaging.requirements import Requirement
14
+ from pip._vendor.packaging.specifiers import SpecifierSet
15
+ from pip._vendor.packaging.utils import canonicalize_name
16
+ from pip._vendor.packaging.version import Version
17
+ from pip._vendor.packaging.version import parse as parse_version
18
+ from pip._vendor.pyproject_hooks import BuildBackendHookCaller
19
+
20
+ from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
21
+ from pip._internal.exceptions import InstallationError, PreviousBuildDirError
22
+ from pip._internal.locations import get_scheme
23
+ from pip._internal.metadata import (
24
+ BaseDistribution,
25
+ get_default_environment,
26
+ get_directory_distribution,
27
+ get_wheel_distribution,
28
+ )
29
+ from pip._internal.metadata.base import FilesystemWheel
30
+ from pip._internal.models.direct_url import DirectUrl
31
+ from pip._internal.models.link import Link
32
+ from pip._internal.operations.build.metadata import generate_metadata
33
+ from pip._internal.operations.build.metadata_editable import generate_editable_metadata
34
+ from pip._internal.operations.build.metadata_legacy import (
35
+ generate_metadata as generate_metadata_legacy,
36
+ )
37
+ from pip._internal.operations.install.editable_legacy import (
38
+ install_editable as install_editable_legacy,
39
+ )
40
+ from pip._internal.operations.install.wheel import install_wheel
41
+ from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
42
+ from pip._internal.req.req_uninstall import UninstallPathSet
43
+ from pip._internal.utils.deprecation import deprecated
44
+ from pip._internal.utils.hashes import Hashes
45
+ from pip._internal.utils.misc import (
46
+ ConfiguredBuildBackendHookCaller,
47
+ ask_path_exists,
48
+ backup_dir,
49
+ display_path,
50
+ hide_url,
51
+ is_installable_dir,
52
+ redact_auth_from_requirement,
53
+ redact_auth_from_url,
54
+ )
55
+ from pip._internal.utils.packaging import safe_extra
56
+ from pip._internal.utils.subprocess import runner_with_spinner_message
57
+ from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
58
+ from pip._internal.utils.unpacking import unpack_file
59
+ from pip._internal.utils.virtualenv import running_under_virtualenv
60
+ from pip._internal.vcs import vcs
61
+
62
+ logger = logging.getLogger(__name__)
63
+
64
+
65
class InstallRequirement:
    """
    Represents something that may be installed later on, may have information
    about where to fetch the relevant requirement and also contains logic for
    installing the said requirement.
    """

    def __init__(
        self,
        req: Optional[Requirement],
        comes_from: Optional[Union[str, "InstallRequirement"]],
        editable: bool = False,
        link: Optional[Link] = None,
        markers: Optional[Marker] = None,
        use_pep517: Optional[bool] = None,
        isolated: bool = False,
        *,
        global_options: Optional[List[str]] = None,
        hash_options: Optional[Dict[str, List[str]]] = None,
        config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
        constraint: bool = False,
        extras: Collection[str] = (),
        user_supplied: bool = False,
        permit_editable_wheels: bool = False,
    ) -> None:
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        self.editable = editable
        self.permit_editable_wheels = permit_editable_wheels

        # source_dir is the local directory where the linked requirement is
        # located, or unpacked. In case unpacking is needed, creating and
        # populating source_dir is done by the RequirementPreparer. Note this
        # is not necessarily the directory where pyproject.toml or setup.py is
        # located - that one is obtained via unpacked_source_directory.
        self.source_dir: Optional[str] = None
        if self.editable:
            assert link
            if link.is_file:
                self.source_dir = os.path.normpath(os.path.abspath(link.file_path))

        # original_link is the direct URL that was provided by the user for
        # the requirement, either directly or via a constraints file.
        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        self.link = self.original_link = link

        # When this InstallRequirement is a wheel obtained from the cache of
        # locally built wheels, this is the source link corresponding to the
        # cache entry, which was used to download and build the cached wheel.
        self.cached_wheel_source_link: Optional[Link] = None

        # Information about the location of the artifact that was downloaded.
        # This property is guaranteed to be set in resolver results.
        self.download_info: Optional[DirectUrl] = None

        # Path to any downloaded or already-existing package.
        self.local_file_path: Optional[str] = None
        if self.link and self.link.is_file:
            self.local_file_path = self.link.file_path

        if extras:
            self.extras = extras
        elif req:
            self.extras = req.extras
        else:
            self.extras = set()
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the Distribution object if this requirement is already installed.
        self.satisfied_by: Optional[BaseDistribution] = None
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
        # Temporary build location
        self._temp_build_dir: Optional[TempDirectory] = None
        # Set to True after successful installation
        self.install_succeeded: Optional[bool] = None
        # Supplied options
        self.global_options = global_options if global_options else []
        self.hash_options = hash_options if hash_options else {}
        self.config_settings = config_settings
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # User supplied requirement are explicitly requested for installation
        # by the user via CLI arguments or requirements files, as opposed to,
        # e.g. dependencies, extras or constraints.
        self.user_supplied = user_supplied

        self.isolated = isolated
        self.build_env: BuildEnvironment = NoOpBuildEnvironment()

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory: Optional[str] = None

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires: Optional[List[str]] = None

        # Build requirements that we will check are available
        self.requirements_to_check: List[str] = []

        # The PEP 517 backend we should use to build the project
        self.pep517_backend: Optional[BuildBackendHookCaller] = None

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = use_pep517

        # If config settings are provided, enforce PEP 517.
        if self.config_settings:
            if self.use_pep517 is False:
                logger.warning(
                    "--no-use-pep517 ignored for %s "
                    "because --config-settings are specified.",
                    self,
                )
            self.use_pep517 = True

        # This requirement needs more preparation before it can be built
        self.needs_more_preparation = False

        # This requirement needs to be unpacked before it can be installed.
        self._archive_source: Optional[Path] = None

    def __str__(self) -> str:
        """Human-readable description: requirement, origin link, install
        location, and the requirement it comes from (all with auth redacted)."""
        if self.req:
            s = redact_auth_from_requirement(self.req)
            if self.link:
                s += f" from {redact_auth_from_url(self.link.url)}"
        elif self.link:
            s = redact_auth_from_url(self.link.url)
        else:
            s = "<InstallRequirement>"
        if self.satisfied_by is not None:
            if self.satisfied_by.location is not None:
                location = display_path(self.satisfied_by.location)
            else:
                location = "<memory>"
            s += f" in {location}"
        if self.comes_from:
            if isinstance(self.comes_from, str):
                comes_from: Optional[str] = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += f" (from {comes_from})"
        return s

    def __repr__(self) -> str:
        return "<{} object: {} editable={!r}>".format(
            self.__class__.__name__, str(self), self.editable
        )

    def format_debug(self) -> str:
        """An un-tested helper for getting state, for debugging."""
        attributes = vars(self)
        names = sorted(attributes)

        state = (f"{attr}={attributes[attr]!r}" for attr in sorted(names))
        return "<{name} object: {{{state}}}>".format(
            name=self.__class__.__name__,
            state=", ".join(state),
        )

    # Things that are valid for all kinds of requirements?
    @property
    def name(self) -> Optional[str]:
        """Project name from the underlying Requirement, or None if unnamed."""
        if self.req is None:
            return None
        return self.req.name

    # NOTE(review): lru_cache on an instance method keys on `self` and keeps
    # the instance alive for the cache's lifetime (flake8-bugbear B019);
    # acceptable here only because these objects live for the whole pip run.
    @functools.lru_cache()  # use cached_property in python 3.8+
    def supports_pyproject_editable(self) -> bool:
        """Ask the PEP 517 backend (in a subprocess) whether it implements
        the PEP 660 ``build_editable`` hook. Cached per instance."""
        if not self.use_pep517:
            return False
        assert self.pep517_backend
        with self.build_env:
            runner = runner_with_spinner_message(
                "Checking if build backend supports build_editable"
            )
            with self.pep517_backend.subprocess_runner(runner):
                return "build_editable" in self.pep517_backend._supported_features()

    @property
    def specifier(self) -> SpecifierSet:
        assert self.req is not None
        return self.req.specifier

    @property
    def is_direct(self) -> bool:
        """Whether this requirement was specified as a direct URL."""
        return self.original_link is not None

    @property
    def is_pinned(self) -> bool:
        """Return whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        """
        assert self.req is not None
        specifiers = self.req.specifier
        return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}

    def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
        """Evaluate this requirement's environment markers against each
        requested extra; True if any evaluation succeeds (or no markers)."""
        if not extras_requested:
            # Provide an extra to safely evaluate the markers
            # without matching any extra
            extras_requested = ("",)
        if self.markers is not None:
            return any(
                self.markers.evaluate({"extra": extra})
                # TODO: Remove these two variants when packaging is upgraded to
                # support the marker comparison logic specified in PEP 685.
                or self.markers.evaluate({"extra": safe_extra(extra)})
                or self.markers.evaluate({"extra": canonicalize_name(extra)})
                for extra in extras_requested
            )
        else:
            return True

    @property
    def has_hash_options(self) -> bool:
        """Return whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.

        """
        return bool(self.hash_options)

    def hashes(self, trust_internet: bool = True) -> Hashes:
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        """
        good_hashes = self.hash_options.copy()
        if trust_internet:
            link = self.link
        elif self.is_direct and self.user_supplied:
            link = self.original_link
        else:
            link = None
        if link and link.hash:
            assert link.hash_name is not None
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)

    def from_path(self) -> Optional[str]:
        """Format a nice indicator to show where this "comes from" """
        if self.req is None:
            return None
        s = str(self.req)
        if self.comes_from:
            comes_from: Optional[str]
            if isinstance(self.comes_from, str):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += "->" + comes_from
        return s

    def ensure_build_location(
        self, build_dir: str, autodelete: bool, parallel_builds: bool
    ) -> str:
        """Return (creating if needed) the directory this requirement will
        be built in, registering it as a globally-managed temp directory."""
        assert build_dir is not None
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(
                kind=tempdir_kinds.REQ_BUILD, globally_managed=True
            )

            return self._temp_build_dir.path

        # This is the only remaining place where we manually determine the
        # path for the temporary directory. It is only needed for editables
        # where it is the value of the --src option.

        # When parallel builds are enabled, add a UUID to the build directory
        # name so multiple builds do not interfere with each other.
        dir_name: str = canonicalize_name(self.req.name)
        if parallel_builds:
            dir_name = f"{dir_name}_{uuid.uuid4().hex}"

        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug("Creating directory %s", build_dir)
            os.makedirs(build_dir)
        actual_build_dir = os.path.join(build_dir, dir_name)
        # `None` indicates that we respect the globally-configured deletion
        # settings, which is what we actually want when auto-deleting.
        delete_arg = None if autodelete else False
        return TempDirectory(
            path=actual_build_dir,
            delete=delete_arg,
            kind=tempdir_kinds.REQ_BUILD,
            globally_managed=True,
        ).path

    def _set_requirement(self) -> None:
        """Set requirement after generating metadata."""
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None

        # Construct a Requirement object from the generated metadata.
        # A PEP 440 version gets "=="; anything else needs the arbitrary
        # equality operator "===".
        if isinstance(parse_version(self.metadata["Version"]), Version):
            op = "=="
        else:
            op = "==="

        self.req = Requirement(
            "".join(
                [
                    self.metadata["Name"],
                    op,
                    self.metadata["Version"],
                ]
            )
        )

    def warn_on_mismatching_name(self) -> None:
        """Warn (and adopt the metadata name) when the generated metadata's
        project name disagrees with the name this requirement was given."""
        assert self.req is not None
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) == metadata_name:
            # Everything is fine.
            return

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            "Generating metadata for package %s "
            "produced metadata for project name %s. Fix your "
            "#egg=%s fragments.",
            self.name,
            metadata_name,
            self.name,
        )
        self.req = Requirement(metadata_name)

    def check_if_exists(self, use_user_site: bool) -> None:
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.should_reinstall appropriately.
        """
        if self.req is None:
            return
        existing_dist = get_default_environment().get_distribution(self.req.name)
        if not existing_dist:
            return

        version_compatible = self.req.specifier.contains(
            existing_dist.version,
            prereleases=True,
        )
        if not version_compatible:
            self.satisfied_by = None
            if use_user_site:
                if existing_dist.in_usersite:
                    self.should_reinstall = True
                elif running_under_virtualenv() and existing_dist.in_site_packages:
                    raise InstallationError(
                        f"Will not install to the user site because it will "
                        f"lack sys.path precedence to {existing_dist.raw_name} "
                        f"in {existing_dist.location}"
                    )
            else:
                self.should_reinstall = True
        else:
            if self.editable:
                self.should_reinstall = True
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
            else:
                self.satisfied_by = existing_dist

    # Things valid for wheels
    @property
    def is_wheel(self) -> bool:
        if not self.link:
            return False
        return self.link.is_wheel

    @property
    def is_wheel_from_cache(self) -> bool:
        # When True, it means that this InstallRequirement is a local wheel
        # file in the cache of locally built wheels.
        return self.cached_wheel_source_link is not None

    # Things valid for sdists
    @property
    def unpacked_source_directory(self) -> str:
        """Directory containing the project source (source_dir plus any
        #subdirectory= fragment from the link)."""
        assert self.source_dir, f"No source dir for {self}"
        return os.path.join(
            self.source_dir, self.link and self.link.subdirectory_fragment or ""
        )

    @property
    def setup_py_path(self) -> str:
        assert self.source_dir, f"No source dir for {self}"
        setup_py = os.path.join(self.unpacked_source_directory, "setup.py")

        return setup_py

    @property
    def setup_cfg_path(self) -> str:
        assert self.source_dir, f"No source dir for {self}"
        setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")

        return setup_cfg

    @property
    def pyproject_toml_path(self) -> str:
        assert self.source_dir, f"No source dir for {self}"
        return make_pyproject_path(self.unpacked_source_directory)

    def load_pyproject_toml(self) -> None:
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
        )

        if pyproject_toml_data is None:
            assert not self.config_settings
            self.use_pep517 = False
            return

        self.use_pep517 = True
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        self.pep517_backend = ConfiguredBuildBackendHookCaller(
            self,
            self.unpacked_source_directory,
            backend,
            backend_path=backend_path,
        )

    def isolated_editable_sanity_check(self) -> None:
        """Check that an editable requirement if valid for use with PEP 517/518.

        This verifies that an editable that has a pyproject.toml either
        supports PEP 660 or has a setup.py or a setup.cfg
        """
        if (
            self.editable
            and self.use_pep517
            and not self.supports_pyproject_editable()
            and not os.path.isfile(self.setup_py_path)
            and not os.path.isfile(self.setup_cfg_path)
        ):
            raise InstallationError(
                f"Project {self} has a 'pyproject.toml' and its build "
                f"backend is missing the 'build_editable' hook. Since it does not "
                f"have a 'setup.py' nor a 'setup.cfg', "
                f"it cannot be installed in editable mode. "
                f"Consider using a build backend that supports PEP 660."
            )

    def prepare_metadata(self) -> None:
        """Ensure that project metadata is available.

        Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
        Under legacy processing, call setup.py egg-info.
        """
        assert self.source_dir, f"No source dir for {self}"
        details = self.name or f"from {self.link}"

        if self.use_pep517:
            assert self.pep517_backend is not None
            if (
                self.editable
                and self.permit_editable_wheels
                and self.supports_pyproject_editable()
            ):
                self.metadata_directory = generate_editable_metadata(
                    build_env=self.build_env,
                    backend=self.pep517_backend,
                    details=details,
                )
            else:
                self.metadata_directory = generate_metadata(
                    build_env=self.build_env,
                    backend=self.pep517_backend,
                    details=details,
                )
        else:
            self.metadata_directory = generate_metadata_legacy(
                build_env=self.build_env,
                setup_py_path=self.setup_py_path,
                source_dir=self.unpacked_source_directory,
                isolated=self.isolated,
                details=details,
            )

        # Act on the newly generated metadata, based on the name and version.
        if not self.name:
            self._set_requirement()
        else:
            self.warn_on_mismatching_name()

        self.assert_source_matches_version()

    @property
    def metadata(self) -> Any:
        # Lazily computed and memoized on first access.
        if not hasattr(self, "_metadata"):
            self._metadata = self.get_dist().metadata

        return self._metadata

    def get_dist(self) -> BaseDistribution:
        """Return a distribution object for this requirement's metadata,
        from either the prepared metadata directory or the wheel file."""
        if self.metadata_directory:
            return get_directory_distribution(self.metadata_directory)
        elif self.local_file_path and self.is_wheel:
            assert self.req is not None
            return get_wheel_distribution(
                FilesystemWheel(self.local_file_path),
                canonicalize_name(self.req.name),
            )
        raise AssertionError(
            f"InstallRequirement {self} has no metadata directory and no wheel: "
            f"can't make a distribution."
        )

    def assert_source_matches_version(self) -> None:
        """Log a warning if the unpacked source's version does not satisfy
        this requirement's specifier (does not raise)."""
        assert self.source_dir, f"No source dir for {self}"
        version = self.metadata["version"]
        if self.req and self.req.specifier and version not in self.req.specifier:
            logger.warning(
                "Requested %s, but installing version %s",
                self,
                version,
            )
        else:
            logger.debug(
                "Source in %s has version %s, which satisfies requirement %s",
                display_path(self.source_dir),
                version,
                self,
            )

    # For both source distributions and editables
    def ensure_has_source_dir(
        self,
        parent_dir: str,
        autodelete: bool = False,
        parallel_builds: bool = False,
    ) -> None:
        """Ensure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        """
        if self.source_dir is None:
            self.source_dir = self.ensure_build_location(
                parent_dir,
                autodelete=autodelete,
                parallel_builds=parallel_builds,
            )

    def needs_unpacked_archive(self, archive_source: Path) -> None:
        """Record an archive to be unpacked into source_dir later, by
        ensure_pristine_source_checkout()."""
        assert self._archive_source is None
        self._archive_source = archive_source

    def ensure_pristine_source_checkout(self) -> None:
        """Ensure the source directory has not yet been built in."""
        assert self.source_dir is not None
        if self._archive_source is not None:
            unpack_file(str(self._archive_source), self.source_dir)
        elif is_installable_dir(self.source_dir):
            # If a checkout exists, it's unwise to keep going.
            # version inconsistencies are logged later, but do not fail
            # the installation.
            raise PreviousBuildDirError(
                f"pip can't proceed with requirements '{self}' due to a "
                f"pre-existing build directory ({self.source_dir}). This is likely "
                "due to a previous installation that failed . pip is "
                "being responsible and not assuming it can delete this. "
                "Please delete it and try again."
            )

    # For editable installations
    def update_editable(self) -> None:
        """Refresh a VCS-backed editable checkout in source_dir; local
        file paths are left untouched."""
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == "file":
            # Static paths don't get updated
            return
        vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
        # Editable requirements are validated in Requirement constructors.
        # So here, if it's neither a path nor a valid VCS URL, it's a bug.
        assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
        hidden_url = hide_url(self.link.url)
        vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)

    # Top-level Actions
    def uninstall(
        self, auto_confirm: bool = False, verbose: bool = False
    ) -> Optional[UninstallPathSet]:
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        """
        assert self.req
        dist = get_default_environment().get_distribution(self.req.name)
        if not dist:
            logger.warning("Skipping %s as it is not installed.", self.name)
            return None
        logger.info("Found existing installation: %s", dist)

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset

    def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
        """Build the in-archive name ("<project>/<relative path>") for a file
        or directory when zipping up this requirement's source tree."""

        def _clean_zip_name(name: str, prefix: str) -> str:
            assert name.startswith(
                prefix + os.path.sep
            ), f"name {name!r} doesn't start with prefix {prefix!r}"
            name = name[len(prefix) + 1 :]
            # Zip entries always use forward slashes, regardless of platform.
            name = name.replace(os.path.sep, "/")
            return name

        assert self.req is not None
        path = os.path.join(parentdir, path)
        name = _clean_zip_name(path, rootdir)
        return self.req.name + "/" + name

    def archive(self, build_dir: Optional[str]) -> None:
        """Saves archive to provided build_dir.

        Used for saving downloaded VCS requirements as part of `pip download`.
        """
        assert self.source_dir
        if build_dir is None:
            return

        create_archive = True
        archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)

        if os.path.exists(archive_path):
            response = ask_path_exists(
                f"The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, "
                "(b)ackup, (a)bort ",
                ("i", "w", "b", "a"),
            )
            if response == "i":
                create_archive = False
            elif response == "w":
                logger.warning("Deleting %s", display_path(archive_path))
                os.remove(archive_path)
            elif response == "b":
                dest_file = backup_dir(archive_path)
                logger.warning(
                    "Backing up %s to %s",
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == "a":
                sys.exit(-1)

        if not create_archive:
            return

        zip_output = zipfile.ZipFile(
            archive_path,
            "w",
            zipfile.ZIP_DEFLATED,
            allowZip64=True,
        )
        with zip_output:
            dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
            for dirpath, dirnames, filenames in os.walk(dir):
                for dirname in dirnames:
                    dir_arcname = self._get_archive_name(
                        dirname,
                        parentdir=dirpath,
                        rootdir=dir,
                    )
                    zipdir = zipfile.ZipInfo(dir_arcname + "/")
                    # Directory entries get mode 0o755 in the high bits of
                    # external_attr (unix permission field).
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_output.writestr(zipdir, "")
                for filename in filenames:
                    file_arcname = self._get_archive_name(
                        filename,
                        parentdir=dirpath,
                        rootdir=dir,
                    )
                    filename = os.path.join(dirpath, filename)
                    zip_output.write(filename, file_arcname)

        logger.info("Saved %s", display_path(archive_path))

    def install(
        self,
        global_options: Optional[Sequence[str]] = None,
        root: Optional[str] = None,
        home: Optional[str] = None,
        prefix: Optional[str] = None,
        warn_script_location: bool = True,
        use_user_site: bool = False,
        pycompile: bool = True,
    ) -> None:
        """Install this requirement: legacy setup.py develop for non-wheel
        editables, otherwise unpack the prepared wheel into the target scheme.

        Sets ``self.install_succeeded`` to True on success.
        """
        assert self.req is not None
        scheme = get_scheme(
            self.req.name,
            user=use_user_site,
            home=home,
            root=root,
            isolated=self.isolated,
            prefix=prefix,
        )

        if self.editable and not self.is_wheel:
            if self.config_settings:
                logger.warning(
                    "--config-settings ignored for legacy editable install of %s. "
                    "Consider upgrading to a version of setuptools "
                    "that supports PEP 660 (>= 64).",
                    self,
                )
            install_editable_legacy(
                global_options=global_options if global_options is not None else [],
                prefix=prefix,
                home=home,
                use_user_site=use_user_site,
                name=self.req.name,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
            )
            self.install_succeeded = True
            return

        assert self.is_wheel
        assert self.local_file_path

        install_wheel(
            self.req.name,
            self.local_file_path,
            scheme=scheme,
            req_description=str(self.req),
            pycompile=pycompile,
            warn_script_location=warn_script_location,
            direct_url=self.download_info if self.is_direct else None,
            requested=self.user_supplied,
        )
        self.install_succeeded = True
867
+
868
+
869
def check_invalid_constraint_type(req: InstallRequirement) -> str:
    """Return a message describing why *req* is an unsupported constraint.

    Returns the empty string when the constraint form is acceptable; a
    non-empty problem description (with a deprecation warning emitted)
    otherwise.
    """
    # Ordered checks: the first matching condition wins.
    checks = (
        (not req.name, "Unnamed requirements are not allowed as constraints"),
        (req.editable, "Editable requirements are not allowed as constraints"),
        (bool(req.extras), "Constraints cannot have extras"),
    )
    problem = next((message for condition, message in checks if condition), "")

    if problem:
        deprecated(
            reason=(
                "Constraints are only allowed to take the form of a package "
                "name and a version specifier. Other forms were originally "
                "permitted as an accident of the implementation, but were "
                "undocumented. The new implementation of the resolver no "
                "longer supports these forms."
            ),
            replacement="replacing the constraint with a requirement",
            # No plan yet for when the new resolver becomes default
            gone_in=None,
            issue=8210,
        )

    return problem
895
+
896
+
897
def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> bool:
    """Return True when *option* is set (truthy) on the global options or on
    any individual requirement."""
    return bool(getattr(options, option, None)) or any(
        getattr(req, option, None) for req in reqs
    )
904
+
905
+
906
def check_legacy_setup_py_options(
    options: Values,
    reqs: List[InstallRequirement],
) -> None:
    """Warn about deprecated --build-option/--global-option usage and, when
    present, force source builds by disallowing binaries."""
    uses_build_options = _has_option(options, reqs, "build_options")
    uses_global_options = _has_option(options, reqs, "global_options")
    if not (uses_build_options or uses_global_options):
        return

    deprecated(
        reason="--build-option and --global-option are deprecated.",
        issue=11859,
        replacement="to use --config-settings",
        gone_in="24.2",
    )
    logger.warning(
        "Implying --no-binary=:all: due to the presence of "
        "--build-option / --global-option. "
    )
    # These options only work with setup.py invocations, so wheels (built
    # or downloaded) must be ruled out for every requirement.
    options.format_control.disallow_binaries()
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/req/req_set.py ADDED
@@ -0,0 +1,119 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from collections import OrderedDict
3
+ from typing import Dict, List
4
+
5
+ from pip._vendor.packaging.specifiers import LegacySpecifier
6
+ from pip._vendor.packaging.utils import canonicalize_name
7
+ from pip._vendor.packaging.version import LegacyVersion
8
+
9
+ from pip._internal.req.req_install import InstallRequirement
10
+ from pip._internal.utils.deprecation import deprecated
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
class RequirementSet:
    """Ordered collection of install requirements, keyed by canonical name."""

    def __init__(self, check_supported_wheels: bool = True) -> None:
        """Create a RequirementSet."""

        # Named requirements, keyed by their canonicalized project name.
        self.requirements: Dict[str, InstallRequirement] = OrderedDict()
        self.check_supported_wheels = check_supported_wheels

        # Requirements without a project name (e.g. bare URLs or paths).
        self.unnamed_requirements: List[InstallRequirement] = []

    def __str__(self) -> str:
        # Only user-supplied (top-level) requirements, ordered by name.
        top_level = [r for r in self.requirements.values() if not r.comes_from]
        top_level.sort(key=lambda r: canonicalize_name(r.name or ""))
        return " ".join(str(r.req) for r in top_level)

    def __repr__(self) -> str:
        ordered = sorted(
            self.requirements.values(),
            key=lambda r: canonicalize_name(r.name or ""),
        )
        listing = ", ".join(str(r.req) for r in ordered)
        return (
            f"<{self.__class__.__name__} object; "
            f"{len(ordered)} requirement(s): {listing}>"
        )

    def add_unnamed_requirement(self, install_req: InstallRequirement) -> None:
        assert not install_req.name
        self.unnamed_requirements.append(install_req)

    def add_named_requirement(self, install_req: InstallRequirement) -> None:
        assert install_req.name
        # A later addition for the same project replaces the earlier one.
        self.requirements[canonicalize_name(install_req.name)] = install_req

    def has_requirement(self, name: str) -> bool:
        """True if a non-constraint requirement for *name* is present."""
        existing = self.requirements.get(canonicalize_name(name))
        return existing is not None and not existing.constraint

    def get_requirement(self, name: str) -> InstallRequirement:
        """Return the requirement registered for *name*.

        Raises KeyError when no requirement with that (canonicalized)
        name is present.
        """
        project_name = canonicalize_name(name)
        if project_name not in self.requirements:
            raise KeyError(f"No project with the name {name!r}")
        return self.requirements[project_name]

    @property
    def all_requirements(self) -> List[InstallRequirement]:
        # Unnamed requirements first, then named ones in insertion order.
        return [*self.unnamed_requirements, *self.requirements.values()]

    @property
    def requirements_to_install(self) -> List[InstallRequirement]:
        """Return the list of requirements that need to be installed.

        TODO remove this property together with the legacy resolver, since the new
        resolver only returns requirements that need to be installed.
        """
        return [
            candidate
            for candidate in self.all_requirements
            if not candidate.constraint and not candidate.satisfied_by
        ]

    def warn_legacy_versions_and_specifiers(self) -> None:
        # Emit deprecation warnings for non-PEP-440 versions and dependency
        # specifiers among the requirements selected for installation.
        for req in self.requirements_to_install:
            version = req.get_dist().version
            if isinstance(version, LegacyVersion):
                deprecated(
                    reason=(
                        f"pip has selected the non standard version {version} "
                        f"of {req}. In the future this version will be "
                        f"ignored as it isn't standard compliant."
                    ),
                    replacement=(
                        "set or update constraints to select another version "
                        "or contact the package author to fix the version number"
                    ),
                    issue=12063,
                    gone_in="24.1",
                )
            for dep in req.get_dist().iter_dependencies():
                has_legacy_spec = any(
                    isinstance(spec, LegacySpecifier) for spec in dep.specifier
                )
                if has_legacy_spec:
                    deprecated(
                        reason=(
                            f"pip has selected {req} {version} which has non "
                            f"standard dependency specifier {dep}. "
                            f"In the future this version of {req} will be "
                            f"ignored as it isn't standard compliant."
                        ),
                        replacement=(
                            "set or update constraints to select another version "
                            "or contact the package author to fix the version number"
                        ),
                        issue=12063,
                        gone_in="24.1",
                    )
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (162 Bytes). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-38.pyc ADDED
Binary file (14.8 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (166 Bytes). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-38.pyc ADDED
Binary file (6.39 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-38.pyc ADDED
Binary file (20.6 kB). View file