ZTWHHH committed on
Commit
89c9c82
·
verified ·
1 Parent(s): dae1be3

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. mantis_evalkit/lib/python3.10/site-packages/decord.libs/libavfilter-1e2243e2.so.7.40.101 +3 -0
  3. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/__init__.cpython-310.pyc +0 -0
  4. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/build_env.cpython-310.pyc +0 -0
  5. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/cache.cpython-310.pyc +0 -0
  6. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/configuration.cpython-310.pyc +0 -0
  7. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/exceptions.cpython-310.pyc +0 -0
  8. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/main.cpython-310.pyc +0 -0
  9. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/pyproject.cpython-310.pyc +0 -0
  10. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-310.pyc +0 -0
  11. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-310.pyc +0 -0
  12. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__init__.py +132 -0
  13. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-310.pyc +0 -0
  14. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/check.cpython-310.pyc +0 -0
  15. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/completion.cpython-310.pyc +0 -0
  16. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-310.pyc +0 -0
  17. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/debug.cpython-310.pyc +0 -0
  18. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/download.cpython-310.pyc +0 -0
  19. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-310.pyc +0 -0
  20. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/hash.cpython-310.pyc +0 -0
  21. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/help.cpython-310.pyc +0 -0
  22. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/index.cpython-310.pyc +0 -0
  23. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/install.cpython-310.pyc +0 -0
  24. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/search.cpython-310.pyc +0 -0
  25. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/show.cpython-310.pyc +0 -0
  26. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-310.pyc +0 -0
  27. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-310.pyc +0 -0
  28. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/cache.py +228 -0
  29. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/check.py +67 -0
  30. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/configuration.py +280 -0
  31. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/freeze.py +109 -0
  32. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/hash.py +59 -0
  33. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/help.py +41 -0
  34. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/inspect.py +92 -0
  35. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/install.py +784 -0
  36. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/search.py +172 -0
  37. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/show.py +224 -0
  38. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/uninstall.py +114 -0
  39. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/__init__.py +2 -0
  40. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/__pycache__/__init__.cpython-310.pyc +0 -0
  41. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/__pycache__/collector.cpython-310.pyc +0 -0
  42. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc +0 -0
  43. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/__pycache__/sources.cpython-310.pyc +0 -0
  44. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/collector.py +494 -0
  45. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/package_finder.py +1029 -0
  46. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/sources.py +284 -0
  47. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/network/__init__.py +2 -0
  48. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/network/__pycache__/__init__.cpython-310.pyc +0 -0
  49. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/network/__pycache__/auth.cpython-310.pyc +0 -0
  50. mantis_evalkit/lib/python3.10/site-packages/pip/_internal/network/__pycache__/cache.cpython-310.pyc +0 -0
.gitattributes CHANGED
@@ -520,3 +520,4 @@ mantis_evalkit/lib/python3.10/site-packages/scipy.libs/libquadmath-96973f99.so.0
520
  mantis_evalkit/lib/python3.10/site-packages/kiwisolver/_cext.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
521
  moondream/lib/python3.10/site-packages/torch/__pycache__/_meta_registrations.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
522
  parrot/lib/python3.10/site-packages/nvidia/cuda_cupti/lib/libnvperf_target.so filter=lfs diff=lfs merge=lfs -text
 
 
520
  mantis_evalkit/lib/python3.10/site-packages/kiwisolver/_cext.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
521
  moondream/lib/python3.10/site-packages/torch/__pycache__/_meta_registrations.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
522
  parrot/lib/python3.10/site-packages/nvidia/cuda_cupti/lib/libnvperf_target.so filter=lfs diff=lfs merge=lfs -text
523
+ mantis_evalkit/lib/python3.10/site-packages/decord.libs/libavfilter-1e2243e2.so.7.40.101 filter=lfs diff=lfs merge=lfs -text
mantis_evalkit/lib/python3.10/site-packages/decord.libs/libavfilter-1e2243e2.so.7.40.101 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:79b37a526b50d6ebcd2255983198276718c29c0942d1fde96306e413041e01cb
3
+ size 3075448
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (681 Bytes). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/build_env.cpython-310.pyc ADDED
Binary file (9.88 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/cache.cpython-310.pyc ADDED
Binary file (9.03 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/configuration.cpython-310.pyc ADDED
Binary file (11.6 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/exceptions.cpython-310.pyc ADDED
Binary file (28.1 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/main.cpython-310.pyc ADDED
Binary file (605 Bytes). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/pyproject.cpython-310.pyc ADDED
Binary file (3.75 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-310.pyc ADDED
Binary file (6.85 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-310.pyc ADDED
Binary file (8.63 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__init__.py ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Package containing all pip commands
3
+ """
4
+
5
+ import importlib
6
+ from collections import namedtuple
7
+ from typing import Any, Dict, Optional
8
+
9
+ from pip._internal.cli.base_command import Command
10
+
11
# Lightweight record describing one pip command without importing its module:
# where it lives, what the class is called, and the one-line help summary.
CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")

# This registry does a bunch of heavy lifting for help output:
# - It lets `--help` be rendered without importing every command module
#   (imports happen lazily, in create_command).
# - Insertion order here is the display order in the help listing.
#
# Even though every module path shares the "pip._internal.commands" prefix,
# the full path is spelled out to make testing easier (specifically when
# modifying `commands_dict` in test setup / teardown).
commands_dict: Dict[str, CommandInfo] = {
    "install": CommandInfo(
        "pip._internal.commands.install",
        "InstallCommand",
        "Install packages.",
    ),
    "download": CommandInfo(
        "pip._internal.commands.download",
        "DownloadCommand",
        "Download packages.",
    ),
    "uninstall": CommandInfo(
        "pip._internal.commands.uninstall",
        "UninstallCommand",
        "Uninstall packages.",
    ),
    "freeze": CommandInfo(
        "pip._internal.commands.freeze",
        "FreezeCommand",
        "Output installed packages in requirements format.",
    ),
    "inspect": CommandInfo(
        "pip._internal.commands.inspect",
        "InspectCommand",
        "Inspect the python environment.",
    ),
    "list": CommandInfo(
        "pip._internal.commands.list",
        "ListCommand",
        "List installed packages.",
    ),
    "show": CommandInfo(
        "pip._internal.commands.show",
        "ShowCommand",
        "Show information about installed packages.",
    ),
    "check": CommandInfo(
        "pip._internal.commands.check",
        "CheckCommand",
        "Verify installed packages have compatible dependencies.",
    ),
    "config": CommandInfo(
        "pip._internal.commands.configuration",
        "ConfigurationCommand",
        "Manage local and global configuration.",
    ),
    "search": CommandInfo(
        "pip._internal.commands.search",
        "SearchCommand",
        "Search PyPI for packages.",
    ),
    "cache": CommandInfo(
        "pip._internal.commands.cache",
        "CacheCommand",
        "Inspect and manage pip's wheel cache.",
    ),
    "index": CommandInfo(
        "pip._internal.commands.index",
        "IndexCommand",
        "Inspect information available from package indexes.",
    ),
    "wheel": CommandInfo(
        "pip._internal.commands.wheel",
        "WheelCommand",
        "Build wheels from your requirements.",
    ),
    "hash": CommandInfo(
        "pip._internal.commands.hash",
        "HashCommand",
        "Compute hashes of package archives.",
    ),
    "completion": CommandInfo(
        "pip._internal.commands.completion",
        "CompletionCommand",
        "A helper command used for command completion.",
    ),
    "debug": CommandInfo(
        "pip._internal.commands.debug",
        "DebugCommand",
        "Show information useful for debugging.",
    ),
    "help": CommandInfo(
        "pip._internal.commands.help",
        "HelpCommand",
        "Show help for commands.",
    ),
}
107
+
108
+
109
def create_command(name: str, **kwargs: Any) -> Command:
    """Instantiate the ``Command`` subclass registered under *name*.

    The implementing module is imported lazily here (rather than at package
    import time), via the module path recorded in ``commands_dict``. Extra
    keyword arguments are forwarded to the command's constructor.
    """
    module_path, class_name, summary = commands_dict[name]
    command_module = importlib.import_module(module_path)
    command_cls = getattr(command_module, class_name)
    return command_cls(name=name, summary=summary, **kwargs)
119
+
120
+
121
def get_similar_commands(name: str) -> Optional[str]:
    """Best-effort auto-correct: the closest known command name, or None."""
    # Imported locally so the (rarely needed) difflib machinery is not paid
    # for on every pip invocation.
    from difflib import get_close_matches

    matches = get_close_matches(name.lower(), commands_dict.keys())
    return matches[0] if matches else None
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (3.24 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/check.cpython-310.pyc ADDED
Binary file (1.96 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/completion.cpython-310.pyc ADDED
Binary file (4.31 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-310.pyc ADDED
Binary file (8.92 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/debug.cpython-310.pyc ADDED
Binary file (6.87 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/download.cpython-310.pyc ADDED
Binary file (4.19 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-310.pyc ADDED
Binary file (2.99 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/hash.cpython-310.pyc ADDED
Binary file (2.14 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/help.cpython-310.pyc ADDED
Binary file (1.3 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/index.cpython-310.pyc ADDED
Binary file (4.53 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/install.cpython-310.pyc ADDED
Binary file (17.9 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/search.cpython-310.pyc ADDED
Binary file (5.3 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/show.cpython-310.pyc ADDED
Binary file (7.15 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-310.pyc ADDED
Binary file (3.32 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-310.pyc ADDED
Binary file (4.92 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/cache.py ADDED
@@ -0,0 +1,228 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import textwrap
3
+ from optparse import Values
4
+ from typing import Any, List
5
+
6
+ from pip._internal.cli.base_command import Command
7
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
8
+ from pip._internal.exceptions import CommandError, PipError
9
+ from pip._internal.utils import filesystem
10
+ from pip._internal.utils.logging import getLogger
11
+ from pip._internal.utils.misc import format_size
12
+
13
+ logger = getLogger(__name__)
14
+
15
+
16
class CacheCommand(Command):
    """
    Inspect and manage pip's wheel cache.

    Subcommands:

    - dir: Show the cache directory.
    - info: Show information about the cache.
    - list: List filenames of packages stored in the cache.
    - remove: Remove one or more package from the cache.
    - purge: Remove all items from the cache.

    ``<pattern>`` can be a glob expression or a package name.
    """

    # `pip cache` is useful even when --require-virtualenv is configured.
    ignore_require_venv = True
    usage = """
        %prog dir
        %prog info
        %prog list [<pattern>] [--format=[human, abspath]]
        %prog remove <pattern>
        %prog purge
    """

    def add_options(self) -> None:
        # --format only affects the `list` subcommand's output.
        self.cmd_opts.add_option(
            "--format",
            action="store",
            dest="list_format",
            default="human",
            choices=("human", "abspath"),
            help="Select the output format among: human (default) or abspath",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Dispatch ``args[0]`` to the matching subcommand handler.

        Returns SUCCESS, or ERROR when the cache is disabled, the action is
        unknown, or a handler raises PipError.
        """
        # Map of subcommand name -> bound handler method.
        handlers = {
            "dir": self.get_cache_dir,
            "info": self.get_cache_info,
            "list": self.list_cache_items,
            "remove": self.remove_cache_items,
            "purge": self.purge_cache,
        }

        if not options.cache_dir:
            logger.error("pip cache commands can not function since cache is disabled.")
            return ERROR

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def get_cache_dir(self, options: Values, args: List[Any]) -> None:
        """Print the configured cache directory (`pip cache dir`)."""
        if args:
            raise CommandError("Too many arguments")

        logger.info(options.cache_dir)

    def get_cache_info(self, options: Values, args: List[Any]) -> None:
        """Print a summary of cache locations, sizes and item counts."""
        if args:
            raise CommandError("Too many arguments")

        num_http_files = len(self._find_http_files(options))
        num_packages = len(self._find_wheels(options, "*"))

        # "http-v2" is the index-page cache used by pip 23.3+; "http" is the
        # layout older pips wrote. Both are reported (and sized together).
        http_cache_location = self._cache_dir(options, "http-v2")
        old_http_cache_location = self._cache_dir(options, "http")
        wheels_cache_location = self._cache_dir(options, "wheels")
        http_cache_size = filesystem.format_size(
            filesystem.directory_size(http_cache_location)
            + filesystem.directory_size(old_http_cache_location)
        )
        wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)

        message = (
            textwrap.dedent(
                """
                    Package index page cache location (pip v23.3+): {http_cache_location}
                    Package index page cache location (older pips): {old_http_cache_location}
                    Package index page cache size: {http_cache_size}
                    Number of HTTP files: {num_http_files}
                    Locally built wheels location: {wheels_cache_location}
                    Locally built wheels size: {wheels_cache_size}
                    Number of locally built wheels: {package_count}
                """  # noqa: E501
            )
            .format(
                http_cache_location=http_cache_location,
                old_http_cache_location=old_http_cache_location,
                http_cache_size=http_cache_size,
                num_http_files=num_http_files,
                wheels_cache_location=wheels_cache_location,
                package_count=num_packages,
                wheels_cache_size=wheels_cache_size,
            )
            .strip()
        )

        logger.info(message)

    def list_cache_items(self, options: Values, args: List[Any]) -> None:
        """List cached locally-built wheels matching an optional glob pattern."""
        if len(args) > 1:
            raise CommandError("Too many arguments")

        if args:
            pattern = args[0]
        else:
            pattern = "*"

        files = self._find_wheels(options, pattern)
        if options.list_format == "human":
            self.format_for_human(files)
        else:
            self.format_for_abspath(files)

    def format_for_human(self, files: List[str]) -> None:
        """Log a sorted, indented ``name (size)`` listing of *files*."""
        if not files:
            logger.info("No locally built wheels cached.")
            return

        results = []
        for filename in files:
            wheel = os.path.basename(filename)
            size = filesystem.format_file_size(filename)
            results.append(f" - {wheel} ({size})")
        logger.info("Cache contents:\n")
        logger.info("\n".join(sorted(results)))

    def format_for_abspath(self, files: List[str]) -> None:
        """Log one absolute path per line; nothing at all when empty."""
        if files:
            logger.info("\n".join(sorted(files)))

    def remove_cache_items(self, options: Values, args: List[Any]) -> None:
        """Delete cached wheels matching ``args[0]`` (and, for "*", HTTP files)."""
        if len(args) > 1:
            raise CommandError("Too many arguments")

        if not args:
            raise CommandError("Please provide a pattern")

        files = self._find_wheels(options, args[0])

        no_matching_msg = "No matching packages"
        if args[0] == "*":
            # Only fetch http files if no specific pattern given
            files += self._find_http_files(options)
        else:
            # Add the pattern to the log message
            no_matching_msg += f' for pattern "{args[0]}"'

        if not files:
            logger.warning(no_matching_msg)

        # Track freed bytes so the final log line can report a total size.
        bytes_removed = 0
        for filename in files:
            bytes_removed += os.stat(filename).st_size
            os.unlink(filename)
            logger.verbose("Removed %s", filename)
        logger.info("Files removed: %s (%s)", len(files), format_size(bytes_removed))

    def purge_cache(self, options: Values, args: List[Any]) -> None:
        """Remove everything: equivalent to ``remove`` with the "*" pattern."""
        if args:
            raise CommandError("Too many arguments")

        return self.remove_cache_items(options, ["*"])

    def _cache_dir(self, options: Values, subdir: str) -> str:
        # Resolve a named subdirectory under the configured cache root.
        return os.path.join(options.cache_dir, subdir)

    def _find_http_files(self, options: Values) -> List[str]:
        # Collect files from both the legacy ("http") and current
        # ("http-v2") index-page cache layouts.
        old_http_dir = self._cache_dir(options, "http")
        new_http_dir = self._cache_dir(options, "http-v2")
        return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(
            new_http_dir, "*"
        )

    def _find_wheels(self, options: Values, pattern: str) -> List[str]:
        """Return cached wheel paths whose filename matches *pattern*."""
        wheel_dir = self._cache_dir(options, "wheels")

        # The wheel filename format, as specified in PEP 427, is:
        # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
        #
        # Additionally, non-alphanumeric values in the distribution are
        # normalized to underscores (_), meaning hyphens can never occur
        # before `-{version}`.
        #
        # Given that information:
        # - If the pattern we're given contains a hyphen (-), the user is
        #   providing at least the version. Thus, we can just append `*.whl`
        #   to match the rest of it.
        # - If the pattern we're given doesn't contain a hyphen (-), the
        #   user is only providing the name. Thus, we append `-*.whl` to
        #   match the hyphen before the version, followed by anything else.
        #
        # PEP 427: https://www.python.org/dev/peps/pep-0427/
        pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")

        return filesystem.find_files(wheel_dir, pattern)
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/check.py ADDED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from optparse import Values
3
+ from typing import List
4
+
5
+ from pip._internal.cli.base_command import Command
6
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
7
+ from pip._internal.metadata import get_default_environment
8
+ from pip._internal.operations.check import (
9
+ check_package_set,
10
+ check_unsupported,
11
+ create_package_set_from_installed,
12
+ )
13
+ from pip._internal.utils.compatibility_tags import get_supported
14
+ from pip._internal.utils.misc import write_output
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
+
19
class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""

    # Checking consistency is useful outside a virtualenv too.
    ignore_require_venv = True
    usage = """
      %prog [options]"""

    def run(self, options: Values, args: List[str]) -> int:
        """Report missing/conflicting/unsupported packages.

        Returns ERROR if any problem (including metadata parsing problems)
        was found, SUCCESS otherwise.
        """
        package_set, parsing_probs = create_package_set_from_installed()
        missing, conflicting = check_package_set(package_set)
        # Distributions whose wheel tags don't match this interpreter/platform.
        unsupported = list(
            check_unsupported(
                get_default_environment().iter_installed_distributions(),
                get_supported(),
            )
        )

        # Report dependencies that are declared but not installed.
        for project_name in missing:
            version = package_set[project_name].version
            for dependency in missing[project_name]:
                write_output(
                    "%s %s requires %s, which is not installed.",
                    project_name,
                    version,
                    dependency[0],
                )

        # Report installed dependencies whose version violates a requirement.
        for project_name in conflicting:
            version = package_set[project_name].version
            for dep_name, dep_version, req in conflicting[project_name]:
                write_output(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name,
                    version,
                    req,
                    dep_name,
                    dep_version,
                )
        for package in unsupported:
            write_output(
                "%s %s is not supported on this platform",
                package.raw_name,
                package.version,
            )
        if missing or conflicting or parsing_probs or unsupported:
            return ERROR
        else:
            write_output("No broken requirements found.")
            return SUCCESS
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/configuration.py ADDED
@@ -0,0 +1,280 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ import subprocess
4
+ from optparse import Values
5
+ from typing import Any, List, Optional
6
+
7
+ from pip._internal.cli.base_command import Command
8
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
9
+ from pip._internal.configuration import (
10
+ Configuration,
11
+ Kind,
12
+ get_configuration_files,
13
+ kinds,
14
+ )
15
+ from pip._internal.exceptions import PipError
16
+ from pip._internal.utils.logging import indent_log
17
+ from pip._internal.utils.misc import get_prog, write_output
18
+
19
+ logger = logging.getLogger(__name__)
20
+
21
+
22
class ConfigurationCommand(Command):
    """
    Manage local and global configuration.

    Subcommands:

    - list: List the active configuration (or from the file specified)
    - edit: Edit the configuration file in an editor
    - get: Get the value associated with command.option
    - set: Set the command.option=value
    - unset: Unset the value associated with command.option
    - debug: List the configuration files and values defined under them

    Configuration keys should be dot separated command and option name,
    with the special prefix "global" affecting any command. For example,
    "pip config set global.index-url https://example.org/" would configure
    the index url for all commands, but "pip config set download.timeout 10"
    would configure a 10 second timeout only for "pip download" commands.

    If none of --user, --global and --site are passed, a virtual
    environment configuration file is used if one is active and the file
    exists. Otherwise, all modifications happen to the user file by
    default.
    """

    ignore_require_venv = True
    usage = """
        %prog [<file-option>] list
        %prog [<file-option>] [--editor <editor-path>] edit

        %prog [<file-option>] get command.option
        %prog [<file-option>] set command.option value
        %prog [<file-option>] unset command.option
        %prog [<file-option>] debug
    """

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "--editor",
            dest="editor",
            action="store",
            default=None,
            help=(
                "Editor to use to edit the file. Uses VISUAL or EDITOR "
                "environment variables if not provided."
            ),
        )

        # --global / --user / --site are mutually exclusive selectors for
        # which configuration file to operate on (enforced in _determine_file).
        self.cmd_opts.add_option(
            "--global",
            dest="global_file",
            action="store_true",
            default=False,
            help="Use the system-wide configuration file only",
        )

        self.cmd_opts.add_option(
            "--user",
            dest="user_file",
            action="store_true",
            default=False,
            help="Use the user configuration file only",
        )

        self.cmd_opts.add_option(
            "--site",
            dest="site_file",
            action="store_true",
            default=False,
            help="Use the current environment configuration file only",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Dispatch ``args[0]`` to the matching subcommand handler."""
        handlers = {
            "list": self.list_values,
            "edit": self.open_in_editor,
            "get": self.get_name,
            "set": self.set_name_value,
            "unset": self.unset_name,
            "debug": self.list_config_values,
        }

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Determine which configuration files are to be loaded
        #    Depends on whether the command is modifying.
        try:
            load_only = self._determine_file(
                options, need_value=(action in ["get", "set", "unset", "edit"])
            )
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        # Load a new configuration
        self.configuration = Configuration(
            isolated=options.isolated_mode, load_only=load_only
        )
        self.configuration.load()

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
        """Pick the config-file Kind to operate on, or None for "all files".

        Raises PipError when more than one of --user/--global/--site was
        given, since modifying commands need exactly one target file.
        """
        file_options = [
            key
            for key, value in (
                (kinds.USER, options.user_file),
                (kinds.GLOBAL, options.global_file),
                (kinds.SITE, options.site_file),
            )
            if value
        ]

        if not file_options:
            if not need_value:
                return None
            # Default to user, unless there's a site file.
            elif any(
                os.path.exists(site_config_file)
                for site_config_file in get_configuration_files()[kinds.SITE]
            ):
                return kinds.SITE
            else:
                return kinds.USER
        elif len(file_options) == 1:
            return file_options[0]

        raise PipError(
            "Need exactly one file to operate upon "
            "(--user, --site, --global) to perform."
        )

    def list_values(self, options: Values, args: List[str]) -> None:
        """Handler for ``pip config list``: print all key=value pairs."""
        self._get_n_args(args, "list", n=0)

        for key, value in sorted(self.configuration.items()):
            write_output("%s=%r", key, value)

    def get_name(self, options: Values, args: List[str]) -> None:
        """Handler for ``pip config get <name>``."""
        key = self._get_n_args(args, "get [name]", n=1)
        value = self.configuration.get_value(key)

        write_output("%s", value)

    def set_name_value(self, options: Values, args: List[str]) -> None:
        """Handler for ``pip config set <name> <value>``: set and persist."""
        key, value = self._get_n_args(args, "set [name] [value]", n=2)
        self.configuration.set_value(key, value)

        self._save_configuration()

    def unset_name(self, options: Values, args: List[str]) -> None:
        """Handler for ``pip config unset <name>``: remove and persist."""
        key = self._get_n_args(args, "unset [name]", n=1)
        self.configuration.unset_value(key)

        self._save_configuration()

    def list_config_values(self, options: Values, args: List[str]) -> None:
        """List config key-value pairs across different config files"""
        self._get_n_args(args, "debug", n=0)

        self.print_env_var_values()
        # Iterate over config files and print if they exist, and the
        # key-value pairs present in them if they do
        for variant, files in sorted(self.configuration.iter_config_files()):
            write_output("%s:", variant)
            for fname in files:
                with indent_log():
                    file_exists = os.path.exists(fname)
                    write_output("%s, exists: %r", fname, file_exists)
                    if file_exists:
                        self.print_config_file_values(variant)

    def print_config_file_values(self, variant: Kind) -> None:
        """Get key-value pairs from the file of a variant"""
        for name, value in self.configuration.get_values_in_config(variant).items():
            with indent_log():
                write_output("%s: %s", name, value)

    def print_env_var_values(self) -> None:
        """Get key-values pairs present as environment variables"""
        write_output("%s:", "env_var")
        with indent_log():
            for key, value in sorted(self.configuration.get_environ_vars()):
                env_var = f"PIP_{key.upper()}"
                write_output("%s=%r", env_var, value)

    def open_in_editor(self, options: Values, args: List[str]) -> None:
        """Handler for ``pip config edit``: open the target file in an editor."""
        editor = self._determine_editor(options)

        fname = self.configuration.get_file_to_edit()
        if fname is None:
            raise PipError("Could not determine appropriate file.")
        elif '"' in fname:
            # This shouldn't happen, unless we see a username like that.
            # If that happens, we'd appreciate a pull request fixing this.
            raise PipError(
                f'Can not open an editor for a file name containing "\n{fname}'
            )

        try:
            # NOTE(review): the editor command is run through the shell with a
            # quoted filename; fname is rejected above if it contains '"'.
            subprocess.check_call(f'{editor} "{fname}"', shell=True)
        except FileNotFoundError as e:
            if not e.filename:
                e.filename = editor
            raise
        except subprocess.CalledProcessError as e:
            raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")

    def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
        """Helper to make sure the command got the right number of arguments"""
        if len(args) != n:
            msg = (
                f"Got unexpected number of arguments, expected {n}. "
                f'(example: "{get_prog()} config {example}")'
            )
            raise PipError(msg)

        if n == 1:
            return args[0]
        else:
            return args

    def _save_configuration(self) -> None:
        # We successfully ran a modifying command. Need to save the
        # configuration.
        try:
            self.configuration.save()
        except Exception:
            logger.exception(
                "Unable to save configuration. Please report this as a bug."
            )
            raise PipError("Internal Error.")

    def _determine_editor(self, options: Values) -> str:
        # Precedence: --editor flag, then VISUAL, then EDITOR env vars.
        if options.editor is not None:
            return options.editor
        elif "VISUAL" in os.environ:
            return os.environ["VISUAL"]
        elif "EDITOR" in os.environ:
            return os.environ["EDITOR"]
        else:
            raise PipError("Could not determine editor to use.")
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/freeze.py ADDED
@@ -0,0 +1,109 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ from optparse import Values
3
+ from typing import AbstractSet, List
4
+
5
+ from pip._internal.cli import cmdoptions
6
+ from pip._internal.cli.base_command import Command
7
+ from pip._internal.cli.status_codes import SUCCESS
8
+ from pip._internal.operations.freeze import freeze
9
+ from pip._internal.utils.compat import stdlib_pkgs
10
+
11
+
12
+ def _should_suppress_build_backends() -> bool:
13
+ return sys.version_info < (3, 12)
14
+
15
+
16
+ def _dev_pkgs() -> AbstractSet[str]:
17
+ pkgs = {"pip"}
18
+
19
+ if _should_suppress_build_backends():
20
+ pkgs |= {"setuptools", "distribute", "wheel"}
21
+
22
+ return pkgs
23
+
24
+
25
class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    """

    ignore_require_venv = True
    usage = """
      %prog [options]"""
    # Keep stdout clean for the requirements output; logging goes to stderr.
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def add_options(self) -> None:
        """Register the freeze-specific command line options."""
        self.cmd_opts.add_option(
            "-r",
            "--requirement",
            dest="requirements",
            action="append",
            default=[],
            metavar="file",
            help=(
                "Use the order in the given requirements file and its "
                "comments when generating output. This option can be "
                "used multiple times."
            ),
        )
        self.cmd_opts.add_option(
            "-l",
            "--local",
            dest="local",
            action="store_true",
            default=False,
            help=(
                "If in a virtualenv that has global access, do not output "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            "--all",
            dest="freeze_all",
            action="store_true",
            help=(
                "Do not skip these packages in the output:"
                " {}".format(", ".join(_dev_pkgs()))
            ),
        )
        self.cmd_opts.add_option(
            "--exclude-editable",
            dest="exclude_editable",
            action="store_true",
            help="Exclude editable package from output.",
        )
        self.cmd_opts.add_option(cmdoptions.list_exclude())

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Emit one requirement line per installed distribution."""
        # Always hide stdlib packages; hide dev packages unless --all.
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(_dev_pkgs())

        if options.excludes:
            skip.update(options.excludes)

        cmdoptions.check_list_path_option(options)

        for line in freeze(
            requirement=options.requirements,
            local_only=options.local,
            user_only=options.user,
            paths=options.path,
            isolated=options.isolated_mode,
            skip=skip,
            exclude_editable=options.exclude_editable,
        ):
            sys.stdout.write(line + "\n")
        return SUCCESS
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/hash.py ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import hashlib
2
+ import logging
3
+ import sys
4
+ from optparse import Values
5
+ from typing import List
6
+
7
+ from pip._internal.cli.base_command import Command
8
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
9
+ from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
10
+ from pip._internal.utils.misc import read_chunks, write_output
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.
    """

    usage = "%prog [options] <file> ..."
    ignore_require_venv = True

    def add_options(self) -> None:
        """Register the --algorithm option (strong hashes only)."""
        self.cmd_opts.add_option(
            "-a",
            "--algorithm",
            dest="algorithm",
            choices=STRONG_HASHES,
            action="store",
            default=FAVORITE_HASH,
            help="The hash algorithm to use: one of {}".format(
                ", ".join(STRONG_HASHES)
            ),
        )
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Print the digest of every file argument; usage error if none."""
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        algorithm = options.algorithm
        for path in args:
            write_output(
                "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)
            )
        return SUCCESS
51
+
52
+
53
def _hash_of_file(path: str, algorithm: str) -> str:
    """Return the hex digest of the file at *path* using *algorithm*.

    The file is read in chunks (via ``read_chunks``) so arbitrarily large
    archives can be hashed without loading them fully into memory.
    """
    with open(path, "rb") as archive:
        # Named "digest" rather than "hash" to avoid shadowing the builtin.
        digest = hashlib.new(algorithm)
        for chunk in read_chunks(archive):
            digest.update(chunk)
    return digest.hexdigest()
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/help.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from optparse import Values
2
+ from typing import List
3
+
4
+ from pip._internal.cli.base_command import Command
5
+ from pip._internal.cli.status_codes import SUCCESS
6
+ from pip._internal.exceptions import CommandError
7
+
8
+
9
class HelpCommand(Command):
    """Show help for commands"""

    usage = """
      %prog <command>"""
    ignore_require_venv = True

    def run(self, options: Values, args: List[str]) -> int:
        """Print the help text of the named subcommand."""
        # Imported lazily to avoid a circular import with the command
        # registry, which itself imports every command module.
        from pip._internal.commands import (
            commands_dict,
            create_command,
            get_similar_commands,
        )

        try:
            # 'pip help' with no args is handled by pip.__init__.parseopt()
            cmd_name = args[0]  # the command we need help for
        except IndexError:
            return SUCCESS

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)

            msg = [f'unknown command "{cmd_name}"']
            if guess:
                msg.append(f'maybe you meant "{guess}"')

            raise CommandError(" - ".join(msg))

        create_command(cmd_name).parser.print_help()

        return SUCCESS
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/inspect.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from optparse import Values
3
+ from typing import Any, Dict, List
4
+
5
+ from pip._vendor.packaging.markers import default_environment
6
+ from pip._vendor.rich import print_json
7
+
8
+ from pip import __version__
9
+ from pip._internal.cli import cmdoptions
10
+ from pip._internal.cli.base_command import Command
11
+ from pip._internal.cli.status_codes import SUCCESS
12
+ from pip._internal.metadata import BaseDistribution, get_environment
13
+ from pip._internal.utils.compat import stdlib_pkgs
14
+ from pip._internal.utils.urls import path_to_url
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
+
19
class InspectCommand(Command):
    """
    Inspect the content of a Python environment and produce a report in JSON format.
    """

    ignore_require_venv = True
    usage = """
      %prog [options]"""

    def add_options(self) -> None:
        """Register the environment-selection options."""
        self.cmd_opts.add_option(
            "--local",
            action="store_true",
            default=False,
            help=(
                "If in a virtualenv that has global access, do not list "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Dump a JSON report describing every installed distribution."""
        cmdoptions.check_list_path_option(options)
        dists = get_environment(options.path).iter_installed_distributions(
            local_only=options.local,
            user_only=options.user,
            skip=set(stdlib_pkgs),
        )
        output = {
            "version": "1",
            "pip_version": __version__,
            "installed": [self._dist_to_dict(dist) for dist in dists],
            "environment": default_environment(),
            # TODO tags? scheme?
        }
        print_json(data=output)
        return SUCCESS

    def _dist_to_dict(self, dist: BaseDistribution) -> Dict[str, Any]:
        """Serialize one distribution into the report's dict shape."""
        res: Dict[str, Any] = {
            "metadata": dist.metadata_dict,
            "metadata_location": dist.info_location,
        }
        # direct_url. Note that we don't have download_info (as in the installation
        # report) since it is not recorded in installed metadata.
        direct_url = dist.direct_url
        if direct_url is not None:
            res["direct_url"] = direct_url.to_dict()
        else:
            # Emulate direct_url for legacy editable installs.
            editable_project_location = dist.editable_project_location
            if editable_project_location is not None:
                res["direct_url"] = {
                    "url": path_to_url(editable_project_location),
                    "dir_info": {
                        "editable": True,
                    },
                }
        # installer: reuse the value read above instead of re-evaluating
        # the ``installer`` property a second time.
        installer = dist.installer
        if installer:
            res["installer"] = installer
        # requested
        if dist.installed_with_dist_info:
            res["requested"] = dist.requested
        return res
+ return res
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/install.py ADDED
@@ -0,0 +1,784 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import errno
2
+ import json
3
+ import operator
4
+ import os
5
+ import shutil
6
+ import site
7
+ from optparse import SUPPRESS_HELP, Values
8
+ from typing import List, Optional
9
+
10
+ from pip._vendor.packaging.utils import canonicalize_name
11
+ from pip._vendor.rich import print_json
12
+
13
+ # Eagerly import self_outdated_check to avoid crashes. Otherwise,
14
+ # this module would be imported *after* pip was replaced, resulting
15
+ # in crashes if the new self_outdated_check module was incompatible
16
+ # with the rest of pip that's already imported, or allowing a
17
+ # wheel to execute arbitrary code on install by replacing
18
+ # self_outdated_check.
19
+ import pip._internal.self_outdated_check # noqa: F401
20
+ from pip._internal.cache import WheelCache
21
+ from pip._internal.cli import cmdoptions
22
+ from pip._internal.cli.cmdoptions import make_target_python
23
+ from pip._internal.cli.req_command import (
24
+ RequirementCommand,
25
+ with_cleanup,
26
+ )
27
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
28
+ from pip._internal.exceptions import CommandError, InstallationError
29
+ from pip._internal.locations import get_scheme
30
+ from pip._internal.metadata import get_environment
31
+ from pip._internal.models.installation_report import InstallationReport
32
+ from pip._internal.operations.build.build_tracker import get_build_tracker
33
+ from pip._internal.operations.check import ConflictDetails, check_install_conflicts
34
+ from pip._internal.req import install_given_reqs
35
+ from pip._internal.req.req_install import (
36
+ InstallRequirement,
37
+ check_legacy_setup_py_options,
38
+ )
39
+ from pip._internal.utils.compat import WINDOWS
40
+ from pip._internal.utils.filesystem import test_writable_dir
41
+ from pip._internal.utils.logging import getLogger
42
+ from pip._internal.utils.misc import (
43
+ check_externally_managed,
44
+ ensure_dir,
45
+ get_pip_version,
46
+ protect_pip_from_modification_on_windows,
47
+ warn_if_run_as_root,
48
+ write_output,
49
+ )
50
+ from pip._internal.utils.temp_dir import TempDirectory
51
+ from pip._internal.utils.virtualenv import (
52
+ running_under_virtualenv,
53
+ virtualenv_no_global,
54
+ )
55
+ from pip._internal.wheel_builder import build, should_build_for_install_command
56
+
57
+ logger = getLogger(__name__)
58
+
59
+
60
class InstallCommand(RequirementCommand):
    """
    Install packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports installing from "requirements files", which provide
    an easy way to specify a whole environment to be installed.
    """

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] [-e] <vcs project url> ...
      %prog [options] [-e] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def add_options(self) -> None:
        # Register install-specific options, then the shared index options.
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.pre())

        self.cmd_opts.add_option(cmdoptions.editable())
        self.cmd_opts.add_option(
            "--dry-run",
            action="store_true",
            dest="dry_run",
            default=False,
            help=(
                "Don't actually install anything, just print what would be. "
                "Can be used in combination with --ignore-installed "
                "to 'resolve' the requirements."
            ),
        )
        self.cmd_opts.add_option(
            "-t",
            "--target",
            dest="target_dir",
            metavar="dir",
            default=None,
            help=(
                "Install packages into <dir>. "
                "By default this will not replace existing files/folders in "
                "<dir>. Use --upgrade to replace existing packages in <dir> "
                "with new versions."
            ),
        )
        cmdoptions.add_target_python_options(self.cmd_opts)

        self.cmd_opts.add_option(
            "--user",
            dest="use_user_site",
            action="store_true",
            help=(
                "Install to the Python user install directory for your "
                "platform. Typically ~/.local/, or %APPDATA%\\Python on "
                "Windows. (See the Python documentation for site.USER_BASE "
                "for full details.)"
            ),
        )
        self.cmd_opts.add_option(
            "--no-user",
            dest="use_user_site",
            action="store_false",
            help=SUPPRESS_HELP,
        )
        self.cmd_opts.add_option(
            "--root",
            dest="root_path",
            metavar="dir",
            default=None,
            help="Install everything relative to this alternate root directory.",
        )
        self.cmd_opts.add_option(
            "--prefix",
            dest="prefix_path",
            metavar="dir",
            default=None,
            help=(
                "Installation prefix where lib, bin and other top-level "
                "folders are placed. Note that the resulting installation may "
                "contain scripts and other resources which reference the "
                "Python interpreter of pip, and not that of ``--prefix``. "
                "See also the ``--python`` option if the intention is to "
                "install packages into another (possibly pip-free) "
                "environment."
            ),
        )

        self.cmd_opts.add_option(cmdoptions.src())

        self.cmd_opts.add_option(
            "-U",
            "--upgrade",
            dest="upgrade",
            action="store_true",
            help=(
                "Upgrade all specified packages to the newest available "
                "version. The handling of dependencies depends on the "
                "upgrade-strategy used."
            ),
        )

        self.cmd_opts.add_option(
            "--upgrade-strategy",
            dest="upgrade_strategy",
            default="only-if-needed",
            choices=["only-if-needed", "eager"],
            help=(
                "Determines how dependency upgrading should be handled "
                "[default: %default]. "
                '"eager" - dependencies are upgraded regardless of '
                "whether the currently installed version satisfies the "
                "requirements of the upgraded package(s). "
                '"only-if-needed" - are upgraded only when they do not '
                "satisfy the requirements of the upgraded package(s)."
            ),
        )

        self.cmd_opts.add_option(
            "--force-reinstall",
            dest="force_reinstall",
            action="store_true",
            help="Reinstall all packages even if they are already up-to-date.",
        )

        self.cmd_opts.add_option(
            "-I",
            "--ignore-installed",
            dest="ignore_installed",
            action="store_true",
            help=(
                "Ignore the installed packages, overwriting them. "
                "This can break your system if the existing package "
                "is of a different version or was installed "
                "with a different package manager!"
            ),
        )

        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.check_build_deps())
        self.cmd_opts.add_option(cmdoptions.override_externally_managed())

        self.cmd_opts.add_option(cmdoptions.config_settings())
        self.cmd_opts.add_option(cmdoptions.global_options())

        # --compile/--no-compile share the "compile" dest; --compile is the
        # default, --no-compile flips it off.
        self.cmd_opts.add_option(
            "--compile",
            action="store_true",
            dest="compile",
            default=True,
            help="Compile Python source files to bytecode",
        )

        self.cmd_opts.add_option(
            "--no-compile",
            action="store_false",
            dest="compile",
            help="Do not compile Python source files to bytecode",
        )

        self.cmd_opts.add_option(
            "--no-warn-script-location",
            action="store_false",
            dest="warn_script_location",
            default=True,
            help="Do not warn when installing scripts outside PATH",
        )
        self.cmd_opts.add_option(
            "--no-warn-conflicts",
            action="store_false",
            dest="warn_about_conflicts",
            default=True,
            help="Do not warn about broken dependencies",
        )
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.require_hashes())
        self.cmd_opts.add_option(cmdoptions.progress_bar())
        self.cmd_opts.add_option(cmdoptions.root_user_action())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

        # NOTE: added after the option group has already been inserted; the
        # group is shared by reference so the option still appears.
        self.cmd_opts.add_option(
            "--report",
            dest="json_report_file",
            metavar="file",
            default=None,
            help=(
                "Generate a JSON file describing what pip did to install "
                "the provided requirements. "
                "Can be used in combination with --dry-run and --ignore-installed "
                "to 'resolve' the requirements. "
                "When - is used as file name it writes to stdout. "
                "When writing to stdout, please combine with the --quiet option "
                "to avoid mixing pip logging output with JSON output."
            ),
        )

    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:
        # Resolve, build and install the requested requirements.
        # Returns SUCCESS or ERROR; OSErrors during install are reported
        # (not re-raised) so a friendly message can be shown.
        if options.use_user_site and options.target_dir is not None:
            raise CommandError("Can not combine '--user' and '--target'")

        # Check whether the environment we're installing into is externally
        # managed, as specified in PEP 668. Specifying --root, --target, or
        # --prefix disables the check, since there's no reliable way to locate
        # the EXTERNALLY-MANAGED file for those cases. An exception is also
        # made specifically for "--dry-run --report" for convenience.
        installing_into_current_environment = (
            not (options.dry_run and options.json_report_file)
            and options.root_path is None
            and options.target_dir is None
            and options.prefix_path is None
        )
        if (
            installing_into_current_environment
            and not options.override_externally_managed
        ):
            check_externally_managed()

        upgrade_strategy = "to-satisfy-only"
        if options.upgrade:
            upgrade_strategy = options.upgrade_strategy

        cmdoptions.check_dist_restriction(options, check_target=True)

        logger.verbose("Using %s", get_pip_version())
        options.use_user_site = decide_user_install(
            options.use_user_site,
            prefix_path=options.prefix_path,
            target_dir=options.target_dir,
            root_path=options.root_path,
            isolated_mode=options.isolated_mode,
        )

        # With --target, install into a temp dir first, then move the
        # results into the target directory in _handle_target_dir().
        target_temp_dir: Optional[TempDirectory] = None
        target_temp_dir_path: Optional[str] = None
        if options.target_dir:
            options.ignore_installed = True
            options.target_dir = os.path.abspath(options.target_dir)
            if (
                # fmt: off
                os.path.exists(options.target_dir) and
                not os.path.isdir(options.target_dir)
                # fmt: on
            ):
                raise CommandError(
                    "Target path exists but is not a directory, will not continue."
                )

            # Create a target directory for using with the target option
            target_temp_dir = TempDirectory(kind="target")
            target_temp_dir_path = target_temp_dir.path
            self.enter_context(target_temp_dir)

        global_options = options.global_options or []

        session = self.get_default_session(options)

        target_python = make_target_python(options)
        finder = self._build_package_finder(
            options=options,
            session=session,
            target_python=target_python,
            ignore_requires_python=options.ignore_requires_python,
        )
        build_tracker = self.enter_context(get_build_tracker())

        directory = TempDirectory(
            delete=not options.no_clean,
            kind="install",
            globally_managed=True,
        )

        try:
            reqs = self.get_requirements(args, options, finder, session)
            check_legacy_setup_py_options(options, reqs)

            wheel_cache = WheelCache(options.cache_dir)

            # Only when installing is it permitted to use PEP 660.
            # In other circumstances (pip wheel, pip download) we generate
            # regular (i.e. non editable) metadata and wheels.
            for req in reqs:
                req.permit_editable_wheels = True

            preparer = self.make_requirement_preparer(
                temp_build_dir=directory,
                options=options,
                build_tracker=build_tracker,
                session=session,
                finder=finder,
                use_user_site=options.use_user_site,
                verbosity=self.verbosity,
            )
            resolver = self.make_resolver(
                preparer=preparer,
                finder=finder,
                options=options,
                wheel_cache=wheel_cache,
                use_user_site=options.use_user_site,
                ignore_installed=options.ignore_installed,
                ignore_requires_python=options.ignore_requires_python,
                force_reinstall=options.force_reinstall,
                upgrade_strategy=upgrade_strategy,
                use_pep517=options.use_pep517,
                py_version_info=options.python_version,
            )

            self.trace_basic_info(finder)

            requirement_set = resolver.resolve(
                reqs, check_supported_wheels=not options.target_dir
            )

            # Write the --report JSON (to stdout when the file name is "-").
            if options.json_report_file:
                report = InstallationReport(requirement_set.requirements_to_install)
                if options.json_report_file == "-":
                    print_json(data=report.to_dict())
                else:
                    with open(options.json_report_file, "w", encoding="utf-8") as f:
                        json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)

            if options.dry_run:
                would_install_items = sorted(
                    (r.metadata["name"], r.metadata["version"])
                    for r in requirement_set.requirements_to_install
                )
                if would_install_items:
                    write_output(
                        "Would install %s",
                        " ".join("-".join(item) for item in would_install_items),
                    )
                return SUCCESS

            try:
                pip_req = requirement_set.get_requirement("pip")
            except KeyError:
                modifying_pip = False
            else:
                # If we're not replacing an already installed pip,
                # we're not modifying it.
                modifying_pip = pip_req.satisfied_by is None
            protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)

            reqs_to_build = [
                r
                for r in requirement_set.requirements.values()
                if should_build_for_install_command(r)
            ]

            _, build_failures = build(
                reqs_to_build,
                wheel_cache=wheel_cache,
                verify=True,
                build_options=[],
                global_options=global_options,
            )

            if build_failures:
                raise InstallationError(
                    "Failed to build installable wheels for some "
                    "pyproject.toml based projects ({})".format(
                        ", ".join(r.name for r in build_failures)  # type: ignore
                    )
                )

            to_install = resolver.get_installation_order(requirement_set)

            # Check for conflicts in the package set we're installing.
            conflicts: Optional[ConflictDetails] = None
            should_warn_about_conflicts = (
                not options.ignore_dependencies and options.warn_about_conflicts
            )
            if should_warn_about_conflicts:
                conflicts = self._determine_conflicts(to_install)

            # Don't warn about script install locations if
            # --target or --prefix has been specified
            warn_script_location = options.warn_script_location
            if options.target_dir or options.prefix_path:
                warn_script_location = False

            installed = install_given_reqs(
                to_install,
                global_options,
                root=options.root_path,
                home=target_temp_dir_path,
                prefix=options.prefix_path,
                warn_script_location=warn_script_location,
                use_user_site=options.use_user_site,
                pycompile=options.compile,
            )

            lib_locations = get_lib_location_guesses(
                user=options.use_user_site,
                home=target_temp_dir_path,
                root=options.root_path,
                prefix=options.prefix_path,
                isolated=options.isolated_mode,
            )
            env = get_environment(lib_locations)

            # Display a summary of installed packages, with extra care to
            # display a package name as it was requested by the user.
            installed.sort(key=operator.attrgetter("name"))
            summary = []
            installed_versions = {}
            for distribution in env.iter_all_distributions():
                installed_versions[distribution.canonical_name] = distribution.version
            for package in installed:
                display_name = package.name
                version = installed_versions.get(canonicalize_name(display_name), None)
                if version:
                    text = f"{display_name}-{version}"
                else:
                    text = display_name
                summary.append(text)

            if conflicts is not None:
                self._warn_about_conflicts(
                    conflicts,
                    resolver_variant=self.determine_resolver_variant(options),
                )

            installed_desc = " ".join(summary)
            if installed_desc:
                write_output(
                    "Successfully installed %s",
                    installed_desc,
                )
        except OSError as error:
            show_traceback = self.verbosity >= 1

            message = create_os_error_message(
                error,
                show_traceback,
                options.use_user_site,
            )
            logger.error(message, exc_info=show_traceback)

            return ERROR

        if options.target_dir:
            assert target_temp_dir
            self._handle_target_dir(
                options.target_dir, target_temp_dir, options.upgrade
            )
        if options.root_user_action == "warn":
            warn_if_run_as_root()
        return SUCCESS

    def _handle_target_dir(
        self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool
    ) -> None:
        # Move everything installed into the temp dir over to target_dir,
        # honoring --upgrade semantics for pre-existing entries.
        ensure_dir(target_dir)

        # Checking both purelib and platlib directories for installed
        # packages to be moved to target directory
        lib_dir_list = []

        scheme = get_scheme("", home=target_temp_dir.path)
        purelib_dir = scheme.purelib
        platlib_dir = scheme.platlib
        data_dir = scheme.data

        if os.path.exists(purelib_dir):
            lib_dir_list.append(purelib_dir)
        if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
            lib_dir_list.append(platlib_dir)
        if os.path.exists(data_dir):
            lib_dir_list.append(data_dir)

        for lib_dir in lib_dir_list:
            for item in os.listdir(lib_dir):
                if lib_dir == data_dir:
                    # Skip data entries that are really nested lib dirs.
                    ddir = os.path.join(data_dir, item)
                    if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
                        continue
                target_item_dir = os.path.join(target_dir, item)
                if os.path.exists(target_item_dir):
                    if not upgrade:
                        logger.warning(
                            "Target directory %s already exists. Specify "
                            "--upgrade to force replacement.",
                            target_item_dir,
                        )
                        continue
                    if os.path.islink(target_item_dir):
                        logger.warning(
                            "Target directory %s already exists and is "
                            "a link. pip will not automatically replace "
                            "links, please remove if replacement is "
                            "desired.",
                            target_item_dir,
                        )
                        continue
                    if os.path.isdir(target_item_dir):
                        shutil.rmtree(target_item_dir)
                    else:
                        os.remove(target_item_dir)

                shutil.move(os.path.join(lib_dir, item), target_item_dir)

    def _determine_conflicts(
        self, to_install: List[InstallRequirement]
    ) -> Optional[ConflictDetails]:
        # Best-effort: conflict checking must never abort the install.
        try:
            return check_install_conflicts(to_install)
        except Exception:
            logger.exception(
                "Error while checking for conflicts. Please file an issue on "
                "pip's issue tracker: https://github.com/pypa/pip/issues/new"
            )
            return None

    def _warn_about_conflicts(
        self, conflict_details: ConflictDetails, resolver_variant: str
    ) -> None:
        # Log a single critical message summarizing missing and
        # conflicting dependencies discovered by _determine_conflicts().
        package_set, (missing, conflicting) = conflict_details
        if not missing and not conflicting:
            return

        parts: List[str] = []
        if resolver_variant == "legacy":
            parts.append(
                "pip's legacy dependency resolver does not consider dependency "
                "conflicts when selecting packages. This behaviour is the "
                "source of the following dependency conflicts."
            )
        else:
            assert resolver_variant == "resolvelib"
            parts.append(
                "pip's dependency resolver does not currently take into account "
                "all the packages that are installed. This behaviour is the "
                "source of the following dependency conflicts."
            )

        # NOTE: There is some duplication here, with commands/check.py
        for project_name in missing:
            version = package_set[project_name][0]
            for dependency in missing[project_name]:
                message = (
                    f"{project_name} {version} requires {dependency[1]}, "
                    "which is not installed."
                )
                parts.append(message)

        for project_name in conflicting:
            version = package_set[project_name][0]
            for dep_name, dep_version, req in conflicting[project_name]:
                message = (
                    "{name} {version} requires {requirement}, but {you} have "
                    "{dep_name} {dep_version} which is incompatible."
                ).format(
                    name=project_name,
                    version=version,
                    requirement=req,
                    dep_name=dep_name,
                    dep_version=dep_version,
                    you=("you" if resolver_variant == "resolvelib" else "you'll"),
                )
                parts.append(message)

        logger.critical("\n".join(parts))
642
+
643
+
644
def get_lib_location_guesses(
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> List[str]:
    """Return the purelib and platlib directories for the given scheme options.

    All parameters are forwarded to get_scheme(); the distribution name is
    irrelevant here (only directories are wanted), so an empty one is used.
    """
    scheme = get_scheme(
        "", user=user, home=home, root=root, isolated=isolated, prefix=prefix
    )
    return [scheme.purelib, scheme.platlib]
660
+
661
+
662
def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
    """Return True if every candidate site-packages directory is writable.

    purelib and platlib may be the same directory; deduplicate with a set
    so each one is probed only once.
    """
    candidates = set(get_lib_location_guesses(root=root, isolated=isolated))
    return all(test_writable_dir(path) for path in candidates)
667
+
668
+
669
def decide_user_install(
    use_user_site: Optional[bool],
    prefix_path: Optional[str] = None,
    target_dir: Optional[str] = None,
    root_path: Optional[str] = None,
    isolated_mode: bool = False,
) -> bool:
    """Determine whether to do a user install based on the input options.

    If use_user_site is False, no additional checks are done.
    If use_user_site is True, it is checked for compatibility with other
    options.
    If use_user_site is None, the default behaviour depends on the environment,
    which is provided by the other arguments.

    :raises CommandError: if --user is combined with --prefix.
    :raises InstallationError: if --user is requested inside a virtualenv
        that hides the user site-packages.
    """
    # In some cases (config from tox), use_user_site can be set to an integer
    # rather than a bool, which 'use_user_site is False' wouldn't catch.
    if (use_user_site is not None) and (not use_user_site):
        logger.debug("Non-user install by explicit request")
        return False

    if use_user_site:
        if prefix_path:
            raise CommandError(
                "Can not combine '--user' and '--prefix' as they imply "
                "different installation locations"
            )
        if virtualenv_no_global():
            raise InstallationError(
                "Can not perform a '--user' install. User site-packages "
                "are not visible in this virtualenv."
            )
        logger.debug("User install by explicit request")
        return True

    # If we are here, user installs have not been explicitly requested/avoided
    assert use_user_site is None

    # user install incompatible with --prefix/--target
    if prefix_path or target_dir:
        logger.debug("Non-user install due to --prefix or --target option")
        return False

    # If user installs are not enabled, choose a non-user install
    if not site.ENABLE_USER_SITE:
        logger.debug("Non-user install because user site-packages disabled")
        return False

    # If we have permission for a non-user install, do that,
    # otherwise do a user install.
    if site_packages_writable(root=root_path, isolated=isolated_mode):
        logger.debug("Non-user install because site-packages writeable")
        return False

    logger.info(
        "Defaulting to user installation because normal site-packages "
        "is not writeable"
    )
    return True
728
+
729
+
730
def create_os_error_message(
    error: OSError, show_traceback: bool, using_user_site: bool
) -> str:
    """Format an error message for an OSError

    It may occur anytime during the execution of the install command.

    :param error: the OSError that aborted the install.
    :param show_traceback: when True, the traceback is printed elsewhere,
        so the error text itself is omitted here.
    :param using_user_site: whether the failed install already targeted
        the user site (suppresses the --user suggestion).
    """
    parts = []

    # Mention the error if we are not going to show a traceback
    parts.append("Could not install packages due to an OSError")
    if not show_traceback:
        parts.append(": ")
        parts.append(str(error))
    else:
        parts.append(".")

    # Spilt the error indication from a helper message (if any)
    parts[-1] += "\n"

    # Suggest useful actions to the user:
    # (1) using user site-packages or (2) verifying the permissions
    if error.errno == errno.EACCES:
        user_option_part = "Consider using the `--user` option"
        permissions_part = "Check the permissions"

        # Only suggest --user when it is actually available and not in use.
        if not running_under_virtualenv() and not using_user_site:
            parts.extend(
                [
                    user_option_part,
                    " or ",
                    permissions_part.lower(),
                ]
            )
        else:
            parts.append(permissions_part)
        parts.append(".\n")

    # Suggest the user to enable Long Paths if path length is
    # more than 260
    if (
        WINDOWS
        and error.errno == errno.ENOENT
        and error.filename
        and len(error.filename) > 260
    ):
        parts.append(
            "HINT: This error might have occurred since "
            "this system does not have Windows Long Path "
            "support enabled. You can find information on "
            "how to enable this at "
            "https://pip.pypa.io/warnings/enable-long-paths\n"
        )

    return "".join(parts).strip() + "\n"
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/search.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import shutil
3
+ import sys
4
+ import textwrap
5
+ import xmlrpc.client
6
+ from collections import OrderedDict
7
+ from optparse import Values
8
+ from typing import TYPE_CHECKING, Dict, List, Optional, TypedDict
9
+
10
+ from pip._vendor.packaging.version import parse as parse_version
11
+
12
+ from pip._internal.cli.base_command import Command
13
+ from pip._internal.cli.req_command import SessionCommandMixin
14
+ from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
15
+ from pip._internal.exceptions import CommandError
16
+ from pip._internal.metadata import get_default_environment
17
+ from pip._internal.models.index import PyPI
18
+ from pip._internal.network.xmlrpc import PipXmlrpcTransport
19
+ from pip._internal.utils.logging import indent_log
20
+ from pip._internal.utils.misc import write_output
21
+
22
+ if TYPE_CHECKING:
23
+
24
+ class TransformedHit(TypedDict):
25
+ name: str
26
+ summary: str
27
+ versions: List[str]
28
+
29
+
30
+ logger = logging.getLogger(__name__)
31
+
32
+
33
class SearchCommand(Command, SessionCommandMixin):
    """Search for PyPI packages whose name or summary contains <query>."""

    usage = """
      %prog [options] <query>"""
    ignore_require_venv = True

    def add_options(self) -> None:
        # Single command-specific option: the index to query.
        self.cmd_opts.add_option(
            "-i",
            "--index",
            dest="index",
            metavar="URL",
            default=PyPI.pypi_url,
            help="Base URL of Python Package Index (default %default)",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Run the search and print results.

        Returns SUCCESS when at least one hit was found,
        NO_MATCHES_FOUND otherwise.

        :raises CommandError: when no query argument was supplied.
        """
        if not args:
            raise CommandError("Missing required argument (search query).")
        query = args
        pypi_hits = self.search(query, options)
        hits = transform_hits(pypi_hits)

        terminal_width = None
        if sys.stdout.isatty():
            # Only wrap summaries when writing to a real terminal.
            terminal_width = shutil.get_terminal_size()[0]

        print_results(hits, terminal_width=terminal_width)
        if pypi_hits:
            return SUCCESS
        return NO_MATCHES_FOUND

    def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
        """Query the index's XML-RPC search endpoint and return raw hits.

        NOTE(review): PyPI's XML-RPC search API has been disabled upstream;
        against pypi.org this typically raises a Fault — confirm behaviour
        for the configured index.

        :raises CommandError: when the XML-RPC call returns a Fault.
        """
        index_url = options.index

        session = self.get_default_session(options)

        transport = PipXmlrpcTransport(index_url, session)
        pypi = xmlrpc.client.ServerProxy(index_url, transport)
        try:
            hits = pypi.search({"name": query, "summary": query}, "or")
        except xmlrpc.client.Fault as fault:
            message = (
                f"XMLRPC request failed [code: {fault.faultCode}]\n{fault.faultString}"
            )
            raise CommandError(message)
        assert isinstance(hits, list)
        return hits
84
+
85
+
86
def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
    """
    The list from pypi is really a list of versions. We want a list of
    packages with the list of versions stored inline. This converts the
    list from pypi into one we can use.

    The summary kept for a package is the one belonging to its highest
    version seen so far; first-seen order of packages is preserved.
    """
    packages: Dict[str, "TransformedHit"] = OrderedDict()
    for hit in hits:
        name = hit["name"]
        summary = hit["summary"]
        version = hit["version"]

        # Plain membership test — `not in packages.keys()` was redundant.
        if name not in packages:
            packages[name] = {
                "name": name,
                "summary": summary,
                "versions": [version],
            }
        else:
            packages[name]["versions"].append(version)

            # if this is the highest version, replace summary and score
            if version == highest_version(packages[name]["versions"]):
                packages[name]["summary"] = summary

    return list(packages.values())
+ return list(packages.values())
112
+
113
+
114
def print_dist_installation_info(name: str, latest: str) -> None:
    """Print the locally installed version of *name*, if it is installed.

    When the installed version is older than *latest*, also print the
    latest available version (with a --pre hint for pre-releases).
    """
    env = get_default_environment()
    dist = env.get_distribution(name)
    if dist is not None:
        with indent_log():
            if dist.version == latest:
                write_output("INSTALLED: %s (latest)", dist.version)
            else:
                write_output("INSTALLED: %s", dist.version)
                if parse_version(latest).pre:
                    # A plain `pip install` will not pick up a pre-release.
                    write_output(
                        "LATEST: %s (pre-release; install"
                        " with `pip install --pre`)",
                        latest,
                    )
                else:
                    write_output("LATEST: %s", latest)
131
+
132
+
133
def print_results(
    hits: List["TransformedHit"],
    name_column_width: Optional[int] = None,
    terminal_width: Optional[int] = None,
) -> None:
    """Render search hits as "name (version) - summary" lines.

    :param name_column_width: width of the name/version column; computed
        from the widest hit (+4 padding) when not given.
    :param terminal_width: when set, summaries are wrapped to fit.
    """
    if not hits:
        return
    if name_column_width is None:
        name_column_width = (
            max(
                [
                    len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
                    for hit in hits
                ]
            )
            + 4
        )

    for hit in hits:
        name = hit["name"]
        summary = hit["summary"] or ""
        latest = highest_version(hit.get("versions", ["-"]))
        if terminal_width is not None:
            target_width = terminal_width - name_column_width - 5
            if target_width > 10:
                # wrap and indent summary to fit terminal
                summary_lines = textwrap.wrap(summary, target_width)
                summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)

        name_latest = f"{name} ({latest})"
        line = f"{name_latest:{name_column_width}} - {summary}"
        try:
            write_output(line)
            print_dist_installation_info(name, latest)
        except UnicodeEncodeError:
            # Skip entries the output stream's encoding cannot represent.
            pass
169
+
170
+
171
def highest_version(versions: List[str]) -> str:
    """Return the greatest version string in *versions* under PEP 440 ordering."""
    best = versions[0]
    for candidate in versions[1:]:
        # Strict comparison keeps the first of equal versions, matching
        # the behaviour of max() with a key function.
        if parse_version(candidate) > parse_version(best):
            best = candidate
    return best
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/show.py ADDED
@@ -0,0 +1,224 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from optparse import Values
3
+ from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional
4
+
5
+ from pip._vendor.packaging.requirements import InvalidRequirement
6
+ from pip._vendor.packaging.utils import canonicalize_name
7
+
8
+ from pip._internal.cli.base_command import Command
9
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
10
+ from pip._internal.metadata import BaseDistribution, get_default_environment
11
+ from pip._internal.utils.misc import write_output
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
class ShowCommand(Command):
    """
    Show information about one or more installed packages.

    The output is in RFC-compliant mail header format.
    """

    usage = """
      %prog [options] <package> ..."""
    ignore_require_venv = True

    def add_options(self) -> None:
        # -f/--files: additionally list every installed file of the package.
        self.cmd_opts.add_option(
            "-f",
            "--files",
            dest="files",
            action="store_true",
            default=False,
            help="Show the full list of installed files for each package.",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Print metadata for each named installed package.

        Returns ERROR when no names were given or none of them matched an
        installed distribution; SUCCESS otherwise.
        """
        if not args:
            logger.warning("ERROR: Please provide a package name or names.")
            return ERROR
        query = args

        results = search_packages_info(query)
        if not print_results(
            results, list_files=options.files, verbose=options.verbose
        ):
            return ERROR
        return SUCCESS
51
+
52
+
53
class _PackageInfo(NamedTuple):
    """Flattened view of one distribution's metadata, as shown by `pip show`."""

    name: str
    version: str
    location: str  # installation directory; "" when unknown
    editable_project_location: Optional[str]  # set only for editable installs
    requires: List[str]  # direct dependencies, sorted case-insensitively
    required_by: List[str]  # installed packages that depend on this one
    installer: str
    metadata_version: str  # e.g. "2.1"; "" when missing
    classifiers: List[str]
    summary: str
    homepage: str  # Home-page field, or a "homepage" Project-URL fallback
    project_urls: List[str]  # raw "label, url" strings
    author: str
    author_email: str
    license: str
    license_expression: str  # PEP 639 License-Expression (metadata >= 2.4)
    entry_points: List[str]  # raw lines of entry_points.txt
    files: Optional[List[str]]  # declared files; None when undeterminable
72
+
73
+
74
def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
    """
    Gather details from installed distributions. Print distribution name,
    version, location, and installed files. Installed files requires a
    pip generated 'installed-files.txt' in the distributions '.egg-info'
    directory.

    Yields one _PackageInfo per query name that matches an installed
    distribution, in query order. Names that match nothing are logged
    as a warning and skipped.
    """
    env = get_default_environment()

    installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
    query_names = [canonicalize_name(name) for name in query]
    missing = sorted(
        [name for name, pkg in zip(query, query_names) if pkg not in installed]
    )
    if missing:
        logger.warning("Package(s) not found: %s", ", ".join(missing))

    def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
        # Reverse-dependency scan over everything installed.
        return (
            dist.metadata["Name"] or "UNKNOWN"
            for dist in installed.values()
            if current_dist.canonical_name
            in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
        )

    for query_name in query_names:
        try:
            dist = installed[query_name]
        except KeyError:
            # Already reported in `missing` above.
            continue

        try:
            requires = sorted(
                # Avoid duplicates in requirements (e.g. due to environment markers).
                {req.name for req in dist.iter_dependencies()},
                key=str.lower,
            )
        except InvalidRequirement:
            # Fall back to the raw requirement strings when one is unparsable.
            requires = sorted(dist.iter_raw_dependencies(), key=str.lower)

        try:
            required_by = sorted(_get_requiring_packages(dist), key=str.lower)
        except InvalidRequirement:
            required_by = ["#N/A"]

        try:
            entry_points_text = dist.read_text("entry_points.txt")
            entry_points = entry_points_text.splitlines(keepends=False)
        except FileNotFoundError:
            entry_points = []

        files_iter = dist.iter_declared_entries()
        if files_iter is None:
            files: Optional[List[str]] = None
        else:
            files = sorted(files_iter)

        metadata = dist.metadata

        project_urls = metadata.get_all("Project-URL", [])
        homepage = metadata.get("Home-page", "")
        if not homepage:
            # It's common that there is a "homepage" Project-URL, but Home-page
            # remains unset (especially as PEP 621 doesn't surface the field).
            #
            # This logic was taken from PyPI's codebase.
            for url in project_urls:
                url_label, url = url.split(",", maxsplit=1)
                normalized_label = (
                    url_label.casefold().replace("-", "").replace("_", "").strip()
                )
                if normalized_label == "homepage":
                    homepage = url.strip()
                    break

        yield _PackageInfo(
            name=dist.raw_name,
            version=dist.raw_version,
            location=dist.location or "",
            editable_project_location=dist.editable_project_location,
            requires=requires,
            required_by=required_by,
            installer=dist.installer,
            metadata_version=dist.metadata_version or "",
            classifiers=metadata.get_all("Classifier", []),
            summary=metadata.get("Summary", ""),
            homepage=homepage,
            project_urls=project_urls,
            author=metadata.get("Author", ""),
            author_email=metadata.get("Author-email", ""),
            license=metadata.get("License", ""),
            license_expression=metadata.get("License-Expression", ""),
            entry_points=entry_points,
            files=files,
        )
169
+
170
+
171
def print_results(
    distributions: Iterable[_PackageInfo],
    list_files: bool,
    verbose: bool,
) -> bool:
    """
    Print the information from installed distributions found.

    Returns True if at least one distribution was printed, so the caller
    can distinguish "nothing matched" from success.
    """
    results_printed = False
    for i, dist in enumerate(distributions):
        results_printed = True
        if i > 0:
            # Separator between successive packages (RFC-822-ish output).
            write_output("---")

        # NOTE(review): assumes metadata_version is a non-empty dotted
        # number; an empty string would raise ValueError here — confirm
        # upstream guarantees against search_packages_info()'s "" default.
        metadata_version_tuple = tuple(map(int, dist.metadata_version.split(".")))

        write_output("Name: %s", dist.name)
        write_output("Version: %s", dist.version)
        write_output("Summary: %s", dist.summary)
        write_output("Home-page: %s", dist.homepage)
        write_output("Author: %s", dist.author)
        write_output("Author-email: %s", dist.author_email)
        # PEP 639: prefer License-Expression on metadata 2.4+.
        if metadata_version_tuple >= (2, 4) and dist.license_expression:
            write_output("License-Expression: %s", dist.license_expression)
        else:
            write_output("License: %s", dist.license)
        write_output("Location: %s", dist.location)
        if dist.editable_project_location is not None:
            write_output(
                "Editable project location: %s", dist.editable_project_location
            )
        write_output("Requires: %s", ", ".join(dist.requires))
        write_output("Required-by: %s", ", ".join(dist.required_by))

        if verbose:
            write_output("Metadata-Version: %s", dist.metadata_version)
            write_output("Installer: %s", dist.installer)
            write_output("Classifiers:")
            for classifier in dist.classifiers:
                write_output("  %s", classifier)
            write_output("Entry-points:")
            for entry in dist.entry_points:
                write_output("  %s", entry.strip())
            write_output("Project-URLs:")
            for project_url in dist.project_urls:
                write_output("  %s", project_url)
        if list_files:
            write_output("Files:")
            if dist.files is None:
                write_output("Cannot locate RECORD or installed-files.txt")
            else:
                for line in dist.files:
                    write_output("  %s", line.strip())
    return results_printed
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/commands/uninstall.py ADDED
@@ -0,0 +1,114 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from optparse import Values
3
+ from typing import List
4
+
5
+ from pip._vendor.packaging.utils import canonicalize_name
6
+
7
+ from pip._internal.cli import cmdoptions
8
+ from pip._internal.cli.base_command import Command
9
+ from pip._internal.cli.index_command import SessionCommandMixin
10
+ from pip._internal.cli.status_codes import SUCCESS
11
+ from pip._internal.exceptions import InstallationError
12
+ from pip._internal.req import parse_requirements
13
+ from pip._internal.req.constructors import (
14
+ install_req_from_line,
15
+ install_req_from_parsed_requirement,
16
+ )
17
+ from pip._internal.utils.misc import (
18
+ check_externally_managed,
19
+ protect_pip_from_modification_on_windows,
20
+ warn_if_run_as_root,
21
+ )
22
+
23
+ logger = logging.getLogger(__name__)
24
+
25
+
26
class UninstallCommand(Command, SessionCommandMixin):
    """
    Uninstall packages.

    pip is able to uninstall most installed packages. Known exceptions are:

    - Pure distutils packages installed with ``python setup.py install``, which
      leave behind no metadata to determine what files were installed.
    - Script wrappers installed by ``python setup.py develop``.
    """

    usage = """
      %prog [options] <package> ...
      %prog [options] -r <requirements file> ..."""

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-r",
            "--requirement",
            dest="requirements",
            action="append",
            default=[],
            metavar="file",
            help=(
                "Uninstall all the packages listed in the given requirements "
                "file. This option can be used multiple times."
            ),
        )
        self.cmd_opts.add_option(
            "-y",
            "--yes",
            dest="yes",
            action="store_true",
            help="Don't ask for confirmation of uninstall deletions.",
        )
        self.cmd_opts.add_option(cmdoptions.root_user_action())
        self.cmd_opts.add_option(cmdoptions.override_externally_managed())
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Collect requirements from args and -r files, then uninstall each.

        Duplicate names are collapsed by canonical name; nameless
        requirements from the command line are warned about and skipped.

        :raises InstallationError: when nothing uninstallable was given.
        """
        session = self.get_default_session(options)

        # Keyed by canonical name so "Foo" and "foo" are one requirement.
        reqs_to_uninstall = {}
        for name in args:
            req = install_req_from_line(
                name,
                isolated=options.isolated_mode,
            )
            if req.name:
                reqs_to_uninstall[canonicalize_name(req.name)] = req
            else:
                logger.warning(
                    "Invalid requirement: %r ignored -"
                    " the uninstall command expects named"
                    " requirements.",
                    name,
                )
        for filename in options.requirements:
            for parsed_req in parse_requirements(
                filename, options=options, session=session
            ):
                req = install_req_from_parsed_requirement(
                    parsed_req, isolated=options.isolated_mode
                )
                if req.name:
                    reqs_to_uninstall[canonicalize_name(req.name)] = req
        if not reqs_to_uninstall:
            raise InstallationError(
                f"You must give at least one requirement to {self.name} (see "
                f'"pip help {self.name}")'
            )

        # PEP 668: refuse to touch an externally managed environment unless
        # the user explicitly overrides.
        if not options.override_externally_managed:
            check_externally_managed()

        protect_pip_from_modification_on_windows(
            modifying_pip="pip" in reqs_to_uninstall
        )

        for req in reqs_to_uninstall.values():
            uninstall_pathset = req.uninstall(
                auto_confirm=options.yes,
                verbose=self.verbosity > 0,
            )
            if uninstall_pathset:
                # Make the removal permanent (deletes the stashed files).
                uninstall_pathset.commit()
        if options.root_user_action == "warn":
            warn_if_run_as_root()
        return SUCCESS
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ """Index interaction code
2
+ """
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (214 Bytes). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/__pycache__/collector.cpython-310.pyc ADDED
Binary file (15.1 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc ADDED
Binary file (29.6 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/__pycache__/sources.cpython-310.pyc ADDED
Binary file (8.87 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/collector.py ADDED
@@ -0,0 +1,494 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ The main purpose of this module is to expose LinkCollector.collect_sources().
3
+ """
4
+
5
+ import collections
6
+ import email.message
7
+ import functools
8
+ import itertools
9
+ import json
10
+ import logging
11
+ import os
12
+ import urllib.parse
13
+ import urllib.request
14
+ from dataclasses import dataclass
15
+ from html.parser import HTMLParser
16
+ from optparse import Values
17
+ from typing import (
18
+ Callable,
19
+ Dict,
20
+ Iterable,
21
+ List,
22
+ MutableMapping,
23
+ NamedTuple,
24
+ Optional,
25
+ Protocol,
26
+ Sequence,
27
+ Tuple,
28
+ Union,
29
+ )
30
+
31
+ from pip._vendor import requests
32
+ from pip._vendor.requests import Response
33
+ from pip._vendor.requests.exceptions import RetryError, SSLError
34
+
35
+ from pip._internal.exceptions import NetworkConnectionError
36
+ from pip._internal.models.link import Link
37
+ from pip._internal.models.search_scope import SearchScope
38
+ from pip._internal.network.session import PipSession
39
+ from pip._internal.network.utils import raise_for_status
40
+ from pip._internal.utils.filetypes import is_archive_file
41
+ from pip._internal.utils.misc import redact_auth_from_url
42
+ from pip._internal.vcs import vcs
43
+
44
+ from .sources import CandidatesFromPage, LinkSource, build_source
45
+
46
+ logger = logging.getLogger(__name__)
47
+
48
+ ResponseHeaders = MutableMapping[str, str]
49
+
50
+
51
def _match_vcs_scheme(url: str) -> Optional[str]:
    """Look for VCS schemes in the URL.

    Returns the matched VCS scheme, or None if there's no match.
    """
    # Hoist the lowering out of the loop; it is loop-invariant.
    lowered = url.lower()
    for scheme in vcs.schemes:
        # Length guard: a URL that is exactly a scheme name (e.g. "git")
        # must not raise IndexError on the character lookup.
        if (
            lowered.startswith(scheme)
            and len(url) > len(scheme)
            and url[len(scheme)] in "+:"
        ):
            return scheme
    return None
60
+
61
+
62
+ class _NotAPIContent(Exception):
63
+ def __init__(self, content_type: str, request_desc: str) -> None:
64
+ super().__init__(content_type, request_desc)
65
+ self.content_type = content_type
66
+ self.request_desc = request_desc
67
+
68
+
69
def _ensure_api_header(response: Response) -> None:
    """
    Check the Content-Type header to ensure the response contains a Simple
    API Response.

    Raises `_NotAPIContent` if the content type is not a valid content-type.
    """
    content_type = response.headers.get("Content-Type", "Unknown")

    # Accepted media types for a Simple API response (HTML or JSON forms).
    acceptable_prefixes = (
        "text/html",
        "application/vnd.pypi.simple.v1+html",
        "application/vnd.pypi.simple.v1+json",
    )
    if content_type.lower().startswith(acceptable_prefixes):
        return

    raise _NotAPIContent(content_type, response.request.method)
89
+
90
+
91
+ class _NotHTTP(Exception):
92
+ pass
93
+
94
+
95
def _ensure_api_response(url: str, session: PipSession) -> None:
    """
    Send a HEAD request to the URL, and ensure the response contains a simple
    API Response.

    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
    `_NotAPIContent` if the content type is not a valid content type.
    """
    # Only http(s) URLs can answer a HEAD request meaningfully.
    scheme = urllib.parse.urlsplit(url).scheme
    if scheme not in {"http", "https"}:
        raise _NotHTTP()

    resp = session.head(url, allow_redirects=True)
    raise_for_status(resp)
    _ensure_api_header(resp)
111
+
112
+
113
def _get_simple_response(url: str, session: PipSession) -> Response:
    """Access an Simple API response with GET, and return the response.

    This consists of three parts:

    1. If the URL looks suspiciously like an archive, send a HEAD first to
       check the Content-Type is HTML or Simple API, to avoid downloading a
       large file. Raise `_NotHTTP` if the content type cannot be determined, or
       `_NotAPIContent` if it is not HTML or a Simple API.
    2. Actually perform the request. Raise HTTP exceptions on network failures.
    3. Check the Content-Type header to make sure we got a Simple API response,
       and raise `_NotAPIContent` otherwise.
    """
    # Cheap pre-flight for archive-looking URLs: HEAD before a possibly
    # large GET.
    if is_archive_file(Link(url).filename):
        _ensure_api_response(url, session=session)

    logger.debug("Getting page %s", redact_auth_from_url(url))

    resp = session.get(
        url,
        headers={
            # Prefer the JSON Simple API, fall back to HTML forms.
            "Accept": ", ".join(
                [
                    "application/vnd.pypi.simple.v1+json",
                    "application/vnd.pypi.simple.v1+html; q=0.1",
                    "text/html; q=0.01",
                ]
            ),
            # We don't want to blindly returned cached data for
            # /simple/, because authors generally expecting that
            # twine upload && pip install will function, but if
            # they've done a pip install in the last ~10 minutes
            # it won't. Thus by setting this to zero we will not
            # blindly use any cached data, however the benefit of
            # using max-age=0 instead of no-cache, is that we will
            # still support conditional requests, so we will still
            # minimize traffic sent in cases where the page hasn't
            # changed at all, we will just always incur the round
            # trip for the conditional GET now instead of only
            # once per 10 minutes.
            # For more information, please see pypa/pip#5670.
            "Cache-Control": "max-age=0",
        },
    )
    raise_for_status(resp)

    # The check for archives above only works if the url ends with
    # something that looks like an archive. However that is not a
    # requirement of an url. Unless we issue a HEAD request on every
    # url we cannot know ahead of time for sure if something is a
    # Simple API response or not. However we can check after we've
    # downloaded it.
    _ensure_api_header(resp)

    logger.debug(
        "Fetched page %s as %s",
        redact_auth_from_url(url),
        resp.headers.get("Content-Type", "Unknown"),
    )

    return resp
174
+
175
+
176
def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
    """Return the charset declared in the Content-Type header, if any."""
    if not headers or "Content-Type" not in headers:
        return None
    # Let the email machinery parse the MIME-style parameter list for us.
    msg = email.message.Message()
    msg["content-type"] = headers["Content-Type"]
    charset = msg.get_param("charset")
    if not charset:
        return None
    return str(charset)
185
+
186
+
187
class CacheablePageContent:
    """Hashable wrapper around an IndexContent, keyed by its URL.

    Used as the lru_cache key in with_cached_index_content(); only pages
    that opted into link caching may be wrapped.
    """

    def __init__(self, page: "IndexContent") -> None:
        # Wrapping a page that asked not to be cached would be a bug.
        assert page.cache_link_parsing
        self.page = page

    def __eq__(self, other: object) -> bool:
        # Two wrappers are interchangeable cache keys iff their pages
        # share a URL.
        if not isinstance(other, type(self)):
            return False
        return self.page.url == other.page.url

    def __hash__(self) -> int:
        return hash(self.page.url)
197
+
198
+
199
class ParseLinks(Protocol):
    """Callback protocol: parse an IndexContent into Link objects."""

    def __call__(self, page: "IndexContent") -> Iterable[Link]: ...
201
+
202
+
203
def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
    """
    Given a function that parses an Iterable[Link] from an IndexContent, cache the
    function's result (keyed by CacheablePageContent), unless the IndexContent
    `page` has `page.cache_link_parsing == False`.
    """

    # The lru_cache keys on CacheablePageContent, whose __eq__/__hash__
    # compare page URLs only.
    @functools.lru_cache(maxsize=None)
    def wrapper(cacheable_page: CacheablePageContent) -> List[Link]:
        # Materialize the iterable so the cached value can be re-returned.
        return list(fn(cacheable_page.page))

    @functools.wraps(fn)
    def wrapper_wrapper(page: "IndexContent") -> List[Link]:
        if page.cache_link_parsing:
            return wrapper(CacheablePageContent(page))
        # Uncacheable page: parse fresh every time.
        return list(fn(page))

    return wrapper_wrapper
221
+
222
+
223
@with_cached_index_content
def parse_links(page: "IndexContent") -> Iterable[Link]:
    """
    Parse a Simple API's Index Content, and yield its anchor elements as Link objects.

    Handles both the JSON (application/vnd.pypi.simple.v1+json) and the
    HTML flavors of the Simple API; results may be cached per page URL via
    the with_cached_index_content decorator.
    """

    content_type_l = page.content_type.lower()
    if content_type_l.startswith("application/vnd.pypi.simple.v1+json"):
        # JSON response: each entry of the "files" array describes one file.
        data = json.loads(page.content)
        for file in data.get("files", []):
            link = Link.from_json(file, page.url)
            # from_json returns None for entries that can't be turned into
            # a usable Link; skip those.
            if link is None:
                continue
            yield link
        return

    # Otherwise treat the body as HTML and collect its anchor tags.
    parser = HTMLLinkParser(page.url)
    encoding = page.encoding or "utf-8"
    parser.feed(page.content.decode(encoding))

    url = page.url
    # A <base href> on the page, when present, overrides the page URL for
    # resolving relative hrefs.
    base_url = parser.base_url or url
    for anchor in parser.anchors:
        link = Link.from_element(anchor, page_url=url, base_url=base_url)
        if link is None:
            continue
        yield link
250
+
251
+
252
@dataclass(frozen=True)
class IndexContent:
    """Represents one response (or page), along with its URL.

    :param content: the raw body of the response, as bytes.
    :param content_type: the Content-Type of the response.
    :param encoding: the encoding to decode the given content.
    :param url: the URL from which the HTML was downloaded.
    :param cache_link_parsing: whether links parsed from this page's url
                               should be cached. PyPI index urls should
                               have this set to False, for example.
    """

    content: bytes
    content_type: str
    encoding: Optional[str]
    url: str
    cache_link_parsing: bool = True

    def __str__(self) -> str:
        # Strip any credentials embedded in the URL before display/logging.
        return redact_auth_from_url(self.url)
271
+
272
+
273
class HTMLLinkParser(HTMLParser):
    """
    HTMLParser that records the first <base> href it sees and the
    attributes of every <a> element on the page.
    """

    def __init__(self, url: str) -> None:
        super().__init__(convert_charrefs=True)

        self.url: str = url
        self.base_url: Optional[str] = None
        self.anchors: List[Dict[str, Optional[str]]] = []

    def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
        if tag == "a":
            self.anchors.append(dict(attrs))
            return
        # Only the first <base href> wins; later ones are ignored.
        if tag == "base" and self.base_url is None:
            href = self.get_href(attrs)
            if href is not None:
                self.base_url = href

    def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
        """Return the value of the first "href" attribute, or None."""
        return next((value for name, value in attrs if name == "href"), None)
299
+
300
+
301
def _handle_get_simple_fail(
    link: Link,
    reason: Union[str, Exception],
    meth: Optional[Callable[..., None]] = None,
) -> None:
    """Log that *link* could not be fetched and is being skipped.

    :param meth: the logging callable to use; defaults to logger.debug.
    """
    log = logger.debug if meth is None else meth
    log("Could not fetch URL %s: %s - skipping", link, reason)
309
+
310
+
311
def _make_index_content(
    response: Response, cache_link_parsing: bool = True
) -> IndexContent:
    """Wrap an HTTP response into an IndexContent.

    :param cache_link_parsing: forwarded to IndexContent; controls whether
        links parsed from this page may be cached (see parse_links).
    """
    # Charset comes from the Content-Type header, when declared.
    encoding = _get_encoding_from_headers(response.headers)
    return IndexContent(
        response.content,
        response.headers["Content-Type"],
        encoding=encoding,
        url=response.url,
        cache_link_parsing=cache_link_parsing,
    )
322
+
323
+
324
def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]:
    """Fetch the index page for *link*, returning None on any failure.

    Every fetch failure (unsupported scheme, wrong content type, network,
    SSL, timeout, ...) is logged and swallowed, so callers only distinguish
    "got content" from "got nothing".
    """
    # Drop any URL fragment before fetching.
    url = link.url.split("#", 1)[0]

    # Check for VCS schemes that do not support lookup as web pages.
    vcs_scheme = _match_vcs_scheme(url)
    if vcs_scheme:
        logger.warning(
            "Cannot look at %s URL %s because it does not support lookup as web pages.",
            vcs_scheme,
            link,
        )
        return None

    # Tack index.html onto file:// URLs that point to directories
    scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
    if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
        # add trailing slash if not present so urljoin doesn't trim
        # final segment
        if not url.endswith("/"):
            url += "/"
        # TODO: In the future, it would be nice if pip supported PEP 691
        # style responses in the file:// URLs, however there's no
        # standard file extension for application/vnd.pypi.simple.v1+json
        # so we'll need to come up with something on our own.
        url = urllib.parse.urljoin(url, "index.html")
        logger.debug(" file: URL is directory, getting %s", url)

    try:
        resp = _get_simple_response(url, session=session)
    except _NotHTTP:
        logger.warning(
            "Skipping page %s because it looks like an archive, and cannot "
            "be checked by a HTTP HEAD request.",
            link,
        )
    except _NotAPIContent as exc:
        logger.warning(
            "Skipping page %s because the %s request got Content-Type: %s. "
            "The only supported Content-Types are application/vnd.pypi.simple.v1+json, "
            "application/vnd.pypi.simple.v1+html, and text/html",
            link,
            exc.request_desc,
            exc.content_type,
        )
    except NetworkConnectionError as exc:
        _handle_get_simple_fail(link, exc)
    except RetryError as exc:
        _handle_get_simple_fail(link, exc)
    except SSLError as exc:
        reason = "There was a problem confirming the ssl certificate: "
        reason += str(exc)
        # SSL problems are surfaced at info level rather than debug.
        _handle_get_simple_fail(link, reason, meth=logger.info)
    except requests.ConnectionError as exc:
        _handle_get_simple_fail(link, f"connection error: {exc}")
    except requests.Timeout:
        _handle_get_simple_fail(link, "timed out")
    else:
        # Success path: wrap the response, propagating whether its parsed
        # links may be cached (see IndexContent.cache_link_parsing).
        return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing)
    return None
383
+
384
+
385
class CollectedSources(NamedTuple):
    """Link sources gathered by LinkCollector.collect_sources, split by
    origin. Entries may be None for locations that did not yield a source
    (callers filter those out)."""

    # Sources built from --find-links locations.
    find_links: Sequence[Optional[LinkSource]]
    # Sources built from configured index URLs.
    index_urls: Sequence[Optional[LinkSource]]
388
+
389
+
390
class LinkCollector:
    """
    Responsible for collecting Link objects from all configured locations,
    making network requests as needed.

    The class's main method is its collect_sources() method.
    """

    def __init__(
        self,
        session: PipSession,
        search_scope: SearchScope,
    ) -> None:
        # Where to look: index URLs and --find-links locations.
        self.search_scope = search_scope
        # HTTP session used for all page fetches.
        self.session = session

    @classmethod
    def create(
        cls,
        session: PipSession,
        options: Values,
        suppress_no_index: bool = False,
    ) -> "LinkCollector":
        """
        :param session: The Session to use to make requests.
        :param options: parsed command-line options providing index_url,
            extra_index_urls, no_index and find_links.
        :param suppress_no_index: Whether to ignore the --no-index option
            when constructing the SearchScope object.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index and not suppress_no_index:
            # --no-index wipes out all index URLs (but not find-links).
            logger.debug(
                "Ignoring indexes: %s",
                ",".join(redact_auth_from_url(url) for url in index_urls),
            )
            index_urls = []

        # Make sure find_links is a list before passing to create().
        find_links = options.find_links or []

        search_scope = SearchScope.create(
            find_links=find_links,
            index_urls=index_urls,
            no_index=options.no_index,
        )
        link_collector = LinkCollector(
            session=session,
            search_scope=search_scope,
        )
        return link_collector

    @property
    def find_links(self) -> List[str]:
        # Convenience accessor for the configured --find-links locations.
        return self.search_scope.find_links

    def fetch_response(self, location: Link) -> Optional[IndexContent]:
        """
        Fetch an HTML page containing package links.

        Returns None when the page could not be fetched (failures are
        logged and swallowed by _get_index_content).
        """
        return _get_index_content(location, session=self.session)

    def collect_sources(
        self,
        project_name: str,
        candidates_from_page: CandidatesFromPage,
    ) -> CollectedSources:
        """Build the link sources for *project_name* from the configured
        index URLs and --find-links locations, deduplicated by URL."""
        # The OrderedDict calls deduplicate sources by URL.
        # Index pages set cache_link_parsing=False; find-links pages cache.
        index_url_sources = collections.OrderedDict(
            build_source(
                loc,
                candidates_from_page=candidates_from_page,
                page_validator=self.session.is_secure_origin,
                expand_dir=False,
                cache_link_parsing=False,
                project_name=project_name,
            )
            for loc in self.search_scope.get_index_urls_locations(project_name)
        ).values()
        find_links_sources = collections.OrderedDict(
            build_source(
                loc,
                candidates_from_page=candidates_from_page,
                page_validator=self.session.is_secure_origin,
                expand_dir=True,
                cache_link_parsing=True,
                project_name=project_name,
            )
            for loc in self.find_links
        ).values()

        if logger.isEnabledFor(logging.DEBUG):
            # Summarize the search locations, skipping empty sources.
            lines = [
                f"* {s.link}"
                for s in itertools.chain(find_links_sources, index_url_sources)
                if s is not None and s.link is not None
            ]
            lines = [
                f"{len(lines)} location(s) to search "
                f"for versions of {project_name}:"
            ] + lines
            logger.debug("\n".join(lines))

        return CollectedSources(
            find_links=list(find_links_sources),
            index_urls=list(index_url_sources),
        )
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/package_finder.py ADDED
@@ -0,0 +1,1029 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Routines related to PyPI, indexes"""
2
+
3
+ import enum
4
+ import functools
5
+ import itertools
6
+ import logging
7
+ import re
8
+ from dataclasses import dataclass
9
+ from typing import TYPE_CHECKING, FrozenSet, Iterable, List, Optional, Set, Tuple, Union
10
+
11
+ from pip._vendor.packaging import specifiers
12
+ from pip._vendor.packaging.tags import Tag
13
+ from pip._vendor.packaging.utils import canonicalize_name
14
+ from pip._vendor.packaging.version import InvalidVersion, _BaseVersion
15
+ from pip._vendor.packaging.version import parse as parse_version
16
+
17
+ from pip._internal.exceptions import (
18
+ BestVersionAlreadyInstalled,
19
+ DistributionNotFound,
20
+ InvalidWheelFilename,
21
+ UnsupportedWheel,
22
+ )
23
+ from pip._internal.index.collector import LinkCollector, parse_links
24
+ from pip._internal.models.candidate import InstallationCandidate
25
+ from pip._internal.models.format_control import FormatControl
26
+ from pip._internal.models.link import Link
27
+ from pip._internal.models.search_scope import SearchScope
28
+ from pip._internal.models.selection_prefs import SelectionPreferences
29
+ from pip._internal.models.target_python import TargetPython
30
+ from pip._internal.models.wheel import Wheel
31
+ from pip._internal.req import InstallRequirement
32
+ from pip._internal.utils._log import getLogger
33
+ from pip._internal.utils.filetypes import WHEEL_EXTENSION
34
+ from pip._internal.utils.hashes import Hashes
35
+ from pip._internal.utils.logging import indent_log
36
+ from pip._internal.utils.misc import build_netloc
37
+ from pip._internal.utils.packaging import check_requires_python
38
+ from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
39
+
40
+ if TYPE_CHECKING:
41
+ from pip._vendor.typing_extensions import TypeGuard
42
+
43
+ __all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]
44
+
45
+
46
+ logger = getLogger(__name__)
47
+
48
+ BuildTag = Union[Tuple[()], Tuple[int, str]]
49
+ CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]
50
+
51
+
52
def _check_link_requires_python(
    link: Link,
    version_info: Tuple[int, int, int],
    ignore_requires_python: bool = False,
) -> bool:
    """
    Return whether the given Python version is compatible with a link's
    "Requires-Python" value.

    :param version_info: A 3-tuple of ints representing the Python
        major-minor-micro version to check.
    :param ignore_requires_python: Whether to ignore the "Requires-Python"
        value if the given Python version isn't compatible.
    """
    try:
        is_compatible = check_requires_python(
            link.requires_python,
            version_info=version_info,
        )
    except specifiers.InvalidSpecifier:
        # A malformed Requires-Python specifier is treated as no
        # restriction at all.
        logger.debug(
            "Ignoring invalid Requires-Python (%r) for link: %s",
            link.requires_python,
            link,
        )
        return True

    if is_compatible:
        return True

    version = ".".join(map(str, version_info))
    if ignore_requires_python:
        # Incompatible, but the caller asked us to look the other way.
        logger.debug(
            "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",
            version,
            link.requires_python,
            link,
        )
        return True

    logger.verbose(
        "Link requires a different Python (%s not in: %r): %s",
        version,
        link.requires_python,
        link,
    )
    return False
97
+
98
+
99
class LinkType(enum.Enum):
    """Outcome category of LinkEvaluator.evaluate_link for one link."""

    candidate = enum.auto()  # usable candidate; detail holds the version
    different_project = enum.auto()  # wheel filename names another project
    yanked = enum.auto()  # yanked release and yanked not allowed
    format_unsupported = enum.auto()  # extension/format not installable here
    format_invalid = enum.auto()  # e.g. invalid wheel filename or no version
    platform_mismatch = enum.auto()  # wheel tags or -pyX.Y don't match target
    requires_python_mismatch = enum.auto()  # Requires-Python excludes target
107
+
108
+
109
class LinkEvaluator:
    """
    Responsible for evaluating links for a particular project.
    """

    # Matches a trailing "-pyX[.Y]" marker in a version fragment,
    # e.g. "1.0-py3" or "1.0-py2.7".
    _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    def __init__(
        self,
        project_name: str,
        canonical_name: str,
        formats: FrozenSet[str],
        target_python: TargetPython,
        allow_yanked: bool,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """
        :param project_name: The user supplied package name.
        :param canonical_name: The canonical package name.
        :param formats: The formats allowed for this package. Should be a set
            with 'binary' or 'source' or both in it.
        :param target_python: The target Python interpreter to use when
            evaluating link compatibility. This is used, for example, to
            check wheel compatibility, as well as when checking the Python
            version, e.g. the Python version embedded in a link filename
            (or egg fragment) and against an HTML link's optional PEP 503
            "data-requires-python" attribute.
        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param ignore_requires_python: Whether to ignore incompatible
            PEP 503 "data-requires-python" values in HTML links. Defaults
            to False.
        """
        if ignore_requires_python is None:
            ignore_requires_python = False

        self._allow_yanked = allow_yanked
        self._canonical_name = canonical_name
        self._ignore_requires_python = ignore_requires_python
        self._formats = formats
        self._target_python = target_python

        self.project_name = project_name

    def evaluate_link(self, link: Link) -> Tuple[LinkType, str]:
        """
        Determine whether a link is a candidate for installation.

        The checks run in a fixed order; the first failing check decides
        the returned LinkType.

        :return: A tuple (result, detail), where *result* is an enum
            representing whether the evaluation found a candidate, or the reason
            why one is not found. If a candidate is found, *detail* will be the
            candidate's version string; if one is not found, it contains the
            reason the link fails to qualify.
        """
        version = None
        if link.is_yanked and not self._allow_yanked:
            reason = link.yanked_reason or "<none given>"
            return (LinkType.yanked, f"yanked for reason: {reason}")

        if link.egg_fragment:
            # An explicit #egg= fragment names the project/version directly.
            egg_info = link.egg_fragment
            ext = link.ext
        else:
            egg_info, ext = link.splitext()
            if not ext:
                return (LinkType.format_unsupported, "not a file")
            if ext not in SUPPORTED_EXTENSIONS:
                return (
                    LinkType.format_unsupported,
                    f"unsupported archive format: {ext}",
                )
            if "binary" not in self._formats and ext == WHEEL_EXTENSION:
                reason = f"No binaries permitted for {self.project_name}"
                return (LinkType.format_unsupported, reason)
            if "macosx10" in link.path and ext == ".zip":
                return (LinkType.format_unsupported, "macosx10 one")
            if ext == WHEEL_EXTENSION:
                try:
                    wheel = Wheel(link.filename)
                except InvalidWheelFilename:
                    return (
                        LinkType.format_invalid,
                        "invalid wheel filename",
                    )
                if canonicalize_name(wheel.name) != self._canonical_name:
                    reason = f"wrong project name (not {self.project_name})"
                    return (LinkType.different_project, reason)

                supported_tags = self._target_python.get_unsorted_tags()
                if not wheel.supported(supported_tags):
                    # Include the wheel's tags in the reason string to
                    # simplify troubleshooting compatibility issues.
                    file_tags = ", ".join(wheel.get_formatted_file_tags())
                    reason = (
                        f"none of the wheel's tags ({file_tags}) are compatible "
                        f"(run pip debug --verbose to show compatible tags)"
                    )
                    return (LinkType.platform_mismatch, reason)

                # Wheel filenames carry the version directly.
                version = wheel.version

        # This should be up by the self.ok_binary check, but see issue 2700.
        if "source" not in self._formats and ext != WHEEL_EXTENSION:
            reason = f"No sources permitted for {self.project_name}"
            return (LinkType.format_unsupported, reason)

        if not version:
            # Fall back to extracting the version from the filename/egg
            # fragment (sdists and egg links).
            version = _extract_version_from_fragment(
                egg_info,
                self._canonical_name,
            )
        if not version:
            reason = f"Missing project version for {self.project_name}"
            return (LinkType.format_invalid, reason)

        match = self._py_version_re.search(version)
        if match:
            # Strip a trailing "-pyX.Y" marker and compare it against the
            # target interpreter version.
            version = version[: match.start()]
            py_version = match.group(1)
            if py_version != self._target_python.py_version:
                return (
                    LinkType.platform_mismatch,
                    "Python version is incorrect",
                )

        supports_python = _check_link_requires_python(
            link,
            version_info=self._target_python.py_version_info,
            ignore_requires_python=self._ignore_requires_python,
        )
        if not supports_python:
            reason = f"{version} Requires-Python {link.requires_python}"
            return (LinkType.requires_python_mismatch, reason)

        logger.debug("Found link %s, version: %s", link, version)

        return (LinkType.candidate, version)
250
+
251
+
252
def filter_unallowed_hashes(
    candidates: List[InstallationCandidate],
    hashes: Optional[Hashes],
    project_name: str,
) -> List[InstallationCandidate]:
    """
    Filter out candidates whose hashes aren't allowed, and return a new
    list of candidates.

    If at least one candidate has an allowed hash, then all candidates with
    either an allowed hash or no hash specified are returned. Otherwise,
    the given candidates are returned.

    Including the candidates with no hash specified when there is a match
    allows a warning to be logged if there is a more preferred candidate
    with no hash specified. Returning all candidates in the case of no
    matches lets pip report the hash of the candidate that would otherwise
    have been installed (e.g. permitting the user to more easily update
    their requirements file with the desired hash).
    """
    if not hashes:
        logger.debug(
            "Given no hashes to check %s links for project %r: "
            "discarding no candidates",
            len(candidates),
            project_name,
        )
        # Always hand back a fresh list, never the caller's object.
        return list(candidates)

    matches_or_no_digest: List[InstallationCandidate] = []
    # Non-matching candidates are kept only for the log message below.
    non_matches: List[InstallationCandidate] = []
    match_count = 0
    for candidate in candidates:
        link = candidate.link
        if link.has_hash and not link.is_hash_allowed(hashes=hashes):
            non_matches.append(candidate)
            continue
        if link.has_hash:
            match_count += 1
        matches_or_no_digest.append(candidate)

    # Only filter when at least one hash actually matched; otherwise keep
    # everything (copied, never the caller's list).
    filtered = matches_or_no_digest if match_count else list(candidates)

    if len(filtered) == len(candidates):
        discard_message = "discarding no candidates"
    else:
        discard_message = "discarding {} non-matches:\n  {}".format(
            len(non_matches),
            "\n  ".join(str(candidate.link) for candidate in non_matches),
        )

    logger.debug(
        "Checked %s links for project %r against %s hashes "
        "(%s matches, %s no digest): %s",
        len(candidates),
        project_name,
        hashes.digest_count,
        match_count,
        len(matches_or_no_digest) - match_count,
        discard_message,
    )

    return filtered
324
+
325
+
326
@dataclass
class CandidatePreferences:
    """
    Encapsulates some of the preferences for filtering and sorting
    InstallationCandidate objects.
    """

    # When True, all wheels sort above all source distributions
    # (see CandidateEvaluator._sort_key).
    prefer_binary: bool = False
    # When True, pre-releases are acceptable even if the version
    # specifier does not explicitly ask for them.
    allow_all_prereleases: bool = False
335
+
336
+
337
@dataclass(frozen=True)
class BestCandidateResult:
    """A collection of candidates, returned by `PackageFinder.find_best_candidate`.

    This class is only intended to be instantiated by CandidateEvaluator's
    `compute_best_candidate()` method.

    :param all_candidates: A sequence of all available candidates found.
    :param applicable_candidates: The applicable candidates.
    :param best_candidate: The most preferred candidate found, or None
        if no applicable candidates were found.
    """

    all_candidates: List[InstallationCandidate]
    applicable_candidates: List[InstallationCandidate]
    best_candidate: Optional[InstallationCandidate]

    def __post_init__(self) -> None:
        # Internal consistency checks (note: stripped when run with -O):
        # applicable candidates must be drawn from all candidates, and the
        # best candidate, when present, must be one of the applicable ones.
        assert set(self.applicable_candidates) <= set(self.all_candidates)

        if self.best_candidate is None:
            assert not self.applicable_candidates
        else:
            assert self.best_candidate in self.applicable_candidates
361
+
362
+
363
class CandidateEvaluator:
    """
    Responsible for filtering and sorting candidates for installation based
    on what tags are valid.
    """

    @classmethod
    def create(
        cls,
        project_name: str,
        target_python: Optional[TargetPython] = None,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> "CandidateEvaluator":
        """Create a CandidateEvaluator object.

        :param target_python: The target Python interpreter to use when
            checking compatibility. If None (the default), a TargetPython
            object will be constructed from the running Python.
        :param specifier: An optional object implementing `filter`
            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
            versions.
        :param hashes: An optional collection of allowed hashes.
        """
        if target_python is None:
            target_python = TargetPython()
        if specifier is None:
            # An empty SpecifierSet admits every version.
            specifier = specifiers.SpecifierSet()

        supported_tags = target_python.get_sorted_tags()

        return cls(
            project_name=project_name,
            supported_tags=supported_tags,
            specifier=specifier,
            prefer_binary=prefer_binary,
            allow_all_prereleases=allow_all_prereleases,
            hashes=hashes,
        )

    def __init__(
        self,
        project_name: str,
        supported_tags: List[Tag],
        specifier: specifiers.BaseSpecifier,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
        hashes: Optional[Hashes] = None,
    ) -> None:
        """
        :param supported_tags: The PEP 425 tags supported by the target
            Python in order of preference (most preferred first).
        """
        self._allow_all_prereleases = allow_all_prereleases
        self._hashes = hashes
        self._prefer_binary = prefer_binary
        self._project_name = project_name
        self._specifier = specifier
        self._supported_tags = supported_tags
        # Since the index of the tag in the _supported_tags list is used
        # as a priority, precompute a map from tag to index/priority to be
        # used in wheel.find_most_preferred_tag.
        self._wheel_tag_preferences = {
            tag: idx for idx, tag in enumerate(supported_tags)
        }

    def get_applicable_candidates(
        self,
        candidates: List[InstallationCandidate],
    ) -> List[InstallationCandidate]:
        """
        Return the applicable candidates from a list of candidates,
        filtered by the version specifier and allowed hashes, sorted
        least-preferred first (see _sort_key).
        """
        # Using None infers from the specifier instead.
        allow_prereleases = self._allow_all_prereleases or None
        specifier = self._specifier

        # We turn the version object into a str here because otherwise
        # when we're debundled but setuptools isn't, Python will see
        # packaging.version.Version and
        # pkg_resources._vendor.packaging.version.Version as different
        # types. This way we'll use a str as a common data interchange
        # format. If we stop using the pkg_resources provided specifier
        # and start using our own, we can drop the cast to str().
        candidates_and_versions = [(c, str(c.version)) for c in candidates]
        versions = set(
            specifier.filter(
                (v for _, v in candidates_and_versions),
                prereleases=allow_prereleases,
            )
        )

        applicable_candidates = [c for c, v in candidates_and_versions if v in versions]
        filtered_applicable_candidates = filter_unallowed_hashes(
            candidates=applicable_candidates,
            hashes=self._hashes,
            project_name=self._project_name,
        )

        return sorted(filtered_applicable_candidates, key=self._sort_key)

    def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey:
        """
        Function to pass as the `key` argument to a call to sorted() to sort
        InstallationCandidates by preference.

        Returns a tuple such that tuples sorting as greater using Python's
        default comparison operator are more preferred.

        The preference is as follows:

        First and foremost, candidates with allowed (matching) hashes are
        always preferred over candidates without matching hashes. This is
        because e.g. if the only candidate with an allowed hash is yanked,
        we still want to use that candidate.

        Second, excepting hash considerations, candidates that have been
        yanked (in the sense of PEP 592) are always less preferred than
        candidates that haven't been yanked. Then:

        If not finding wheels, they are sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self._supported_tags)
          3. source archives
        If prefer_binary was set, then all wheels are sorted above sources.

        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        """
        valid_tags = self._supported_tags
        support_num = len(valid_tags)
        build_tag: BuildTag = ()
        binary_preference = 0
        link = candidate.link
        if link.is_wheel:
            # can raise InvalidWheelFilename
            wheel = Wheel(link.filename)
            try:
                # Negate the tag index so that more-preferred (lower index)
                # tags yield a larger sort key.
                pri = -(
                    wheel.find_most_preferred_tag(
                        valid_tags, self._wheel_tag_preferences
                    )
                )
            except ValueError:
                raise UnsupportedWheel(
                    f"{wheel.filename} is not a supported wheel for this platform. It "
                    "can't be sorted."
                )
            if self._prefer_binary:
                binary_preference = 1
            if wheel.build_tag is not None:
                # Build tags break ties between otherwise-equal wheels.
                match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
                assert match is not None, "guaranteed by filename validation"
                build_tag_groups = match.groups()
                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
        else:  # sdist
            # Sort sdists below every supported wheel.
            pri = -(support_num)
        has_allowed_hash = int(link.is_hash_allowed(self._hashes))
        yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
        return (
            has_allowed_hash,
            yank_value,
            binary_preference,
            candidate.version,
            pri,
            build_tag,
        )

    def sort_best_candidate(
        self,
        candidates: List[InstallationCandidate],
    ) -> Optional[InstallationCandidate]:
        """
        Return the best candidate per the instance's sort order, or None if
        no candidate is acceptable.
        """
        if not candidates:
            return None
        best_candidate = max(candidates, key=self._sort_key)
        return best_candidate

    def compute_best_candidate(
        self,
        candidates: List[InstallationCandidate],
    ) -> BestCandidateResult:
        """
        Compute and return a `BestCandidateResult` instance.
        """
        applicable_candidates = self.get_applicable_candidates(candidates)

        best_candidate = self.sort_best_candidate(applicable_candidates)

        return BestCandidateResult(
            candidates,
            applicable_candidates=applicable_candidates,
            best_candidate=best_candidate,
        )
564
+
565
+
566
class PackageFinder:
    """This finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.
    """

    def __init__(
        self,
        link_collector: LinkCollector,
        target_python: TargetPython,
        allow_yanked: bool,
        format_control: Optional[FormatControl] = None,
        candidate_prefs: Optional[CandidatePreferences] = None,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """
        This constructor is primarily meant to be used by the create() class
        method and from tests.

        :param format_control: A FormatControl object, used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param candidate_prefs: Options to use when creating a
            CandidateEvaluator object.
        """
        if candidate_prefs is None:
            candidate_prefs = CandidatePreferences()

        format_control = format_control or FormatControl(set(), set())

        self._allow_yanked = allow_yanked
        self._candidate_prefs = candidate_prefs
        self._ignore_requires_python = ignore_requires_python
        self._link_collector = link_collector
        self._target_python = target_python

        self.format_control = format_control

        # These are boring links that have already been logged somehow.
        self._logged_links: Set[Tuple[Link, LinkType, str]] = set()

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    @classmethod
    def create(
        cls,
        link_collector: LinkCollector,
        selection_prefs: SelectionPreferences,
        target_python: Optional[TargetPython] = None,
    ) -> "PackageFinder":
        """Create a PackageFinder.

        :param selection_prefs: The candidate selection preferences, as a
            SelectionPreferences object.
        :param target_python: The target Python interpreter to use when
            checking compatibility. If None (the default), a TargetPython
            object will be constructed from the running Python.
        """
        if target_python is None:
            target_python = TargetPython()

        candidate_prefs = CandidatePreferences(
            prefer_binary=selection_prefs.prefer_binary,
            allow_all_prereleases=selection_prefs.allow_all_prereleases,
        )

        return cls(
            candidate_prefs=candidate_prefs,
            link_collector=link_collector,
            target_python=target_python,
            allow_yanked=selection_prefs.allow_yanked,
            format_control=selection_prefs.format_control,
            ignore_requires_python=selection_prefs.ignore_requires_python,
        )

    @property
    def target_python(self) -> TargetPython:
        """The interpreter compatibility target used when evaluating links."""
        return self._target_python

    @property
    def search_scope(self) -> SearchScope:
        """The index URLs / find-links scope, delegated to the link collector."""
        return self._link_collector.search_scope

    @search_scope.setter
    def search_scope(self, search_scope: SearchScope) -> None:
        self._link_collector.search_scope = search_scope

    @property
    def find_links(self) -> List[str]:
        """The configured ``--find-links`` locations."""
        return self._link_collector.find_links

    @property
    def index_urls(self) -> List[str]:
        """The configured index URLs (from the search scope)."""
        return self.search_scope.index_urls

    @property
    def proxy(self) -> Optional[str]:
        """The proxy configured on the underlying network session, if any."""
        return self._link_collector.session.pip_proxy

    @property
    def trusted_hosts(self) -> Iterable[str]:
        """Yield each trusted origin as a ``host[:port]`` netloc string."""
        for host_port in self._link_collector.session.pip_trusted_origins:
            yield build_netloc(*host_port)

    @property
    def custom_cert(self) -> Optional[str]:
        """Path to a custom CA bundle, or None when the default is used."""
        # session.verify is either a boolean (use default bundle/no SSL
        # verification) or a string path to a custom CA bundle to use. We only
        # care about the latter.
        verify = self._link_collector.session.verify
        return verify if isinstance(verify, str) else None

    @property
    def client_cert(self) -> Optional[str]:
        """Path to the PEM client certificate configured on the session."""
        cert = self._link_collector.session.cert
        assert not isinstance(cert, tuple), "pip only supports PEM client certs"
        return cert

    @property
    def allow_all_prereleases(self) -> bool:
        """Whether pre-release versions are acceptable for all projects."""
        return self._candidate_prefs.allow_all_prereleases

    def set_allow_all_prereleases(self) -> None:
        """Permanently enable pre-release candidates for this finder."""
        self._candidate_prefs.allow_all_prereleases = True

    @property
    def prefer_binary(self) -> bool:
        """Whether wheels are preferred over sdists when sorting candidates."""
        return self._candidate_prefs.prefer_binary

    def set_prefer_binary(self) -> None:
        """Permanently prefer binary distributions for this finder."""
        self._candidate_prefs.prefer_binary = True

    def requires_python_skipped_reasons(self) -> List[str]:
        """Return the deduplicated, sorted details of links skipped for
        Requires-Python incompatibility (collected in _log_skipped_link)."""
        reasons = {
            detail
            for _, result, detail in self._logged_links
            if result == LinkType.requires_python_mismatch
        }
        return sorted(reasons)

    def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
        """Build a LinkEvaluator for *project_name* using this finder's
        format control, target Python, and yanked/Requires-Python policy."""
        canonical_name = canonicalize_name(project_name)
        formats = self.format_control.get_allowed_formats(canonical_name)

        return LinkEvaluator(
            project_name=project_name,
            canonical_name=canonical_name,
            formats=formats,
            target_python=self._target_python,
            allow_yanked=self._allow_yanked,
            ignore_requires_python=self._ignore_requires_python,
        )

    def _sort_links(self, links: Iterable[Link]) -> List[Link]:
        """
        Returns elements of links in order, non-egg links first, egg links
        second, while eliminating duplicates
        """
        eggs, no_eggs = [], []
        seen: Set[Link] = set()
        for link in links:
            if link not in seen:
                seen.add(link)
                if link.egg_fragment:
                    eggs.append(link)
                else:
                    no_eggs.append(link)
        return no_eggs + eggs

    def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None:
        """Debug-log why *link* was skipped, at most once per (link, result,
        detail) triple. The triples are retained so
        requires_python_skipped_reasons() can report them later."""
        # This is a hot method so don't waste time hashing links unless we're
        # actually going to log 'em.
        if not logger.isEnabledFor(logging.DEBUG):
            return

        entry = (link, result, detail)
        if entry not in self._logged_links:
            # Put the link at the end so the reason is more visible and because
            # the link string is usually very long.
            logger.debug("Skipping link: %s: %s", detail, link)
            self._logged_links.add(entry)

    def get_install_candidate(
        self, link_evaluator: LinkEvaluator, link: Link
    ) -> Optional[InstallationCandidate]:
        """
        If the link is a candidate for install, convert it to an
        InstallationCandidate and return it. Otherwise, return None.
        """
        result, detail = link_evaluator.evaluate_link(link)
        if result != LinkType.candidate:
            self._log_skipped_link(link, result, detail)
            return None

        try:
            # For a candidate link, `detail` holds the version string.
            return InstallationCandidate(
                name=link_evaluator.project_name,
                link=link,
                version=detail,
            )
        except InvalidVersion:
            # A link whose version string cannot be parsed is silently dropped.
            return None

    def evaluate_links(
        self, link_evaluator: LinkEvaluator, links: Iterable[Link]
    ) -> List[InstallationCandidate]:
        """
        Convert links that are candidates to InstallationCandidate objects.
        """
        candidates = []
        for link in self._sort_links(links):
            candidate = self.get_install_candidate(link_evaluator, link)
            if candidate is not None:
                candidates.append(candidate)

        return candidates

    def process_project_url(
        self, project_url: Link, link_evaluator: LinkEvaluator
    ) -> List[InstallationCandidate]:
        """Fetch one project page and return its installable candidates.

        Returns an empty list when the page cannot be fetched.
        """
        logger.debug(
            "Fetching project page and analyzing links: %s",
            project_url,
        )
        index_response = self._link_collector.fetch_response(project_url)
        if index_response is None:
            return []

        page_links = list(parse_links(index_response))

        with indent_log():
            package_links = self.evaluate_links(
                link_evaluator,
                links=page_links,
            )

        return package_links

    # NOTE: lru_cache on an instance method keys on `self` and keeps the
    # finder (and its results) alive for the cache's lifetime.
    @functools.lru_cache(maxsize=None)
    def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
        """Find all available InstallationCandidate for project_name

        This checks index_urls and find_links.
        All versions found are returned as an InstallationCandidate list.

        See LinkEvaluator.evaluate_link() for details on which files
        are accepted.
        """
        link_evaluator = self.make_link_evaluator(project_name)

        collected_sources = self._link_collector.collect_sources(
            project_name=project_name,
            candidates_from_page=functools.partial(
                self.process_project_url,
                link_evaluator=link_evaluator,
            ),
        )

        # Candidates parsed from index/HTML pages.
        page_candidates_it = itertools.chain.from_iterable(
            source.page_candidates()
            for sources in collected_sources
            for source in sources
            if source is not None
        )
        page_candidates = list(page_candidates_it)

        # Candidates from directly-specified archives (e.g. --find-links files).
        file_links_it = itertools.chain.from_iterable(
            source.file_links()
            for sources in collected_sources
            for source in sources
            if source is not None
        )
        file_candidates = self.evaluate_links(
            link_evaluator,
            sorted(file_links_it, reverse=True),
        )

        if logger.isEnabledFor(logging.DEBUG) and file_candidates:
            paths = []
            for candidate in file_candidates:
                assert candidate.link.url  # we need to have a URL
                try:
                    paths.append(candidate.link.file_path)
                except Exception:
                    paths.append(candidate.link.url)  # it's not a local file

            logger.debug("Local files found: %s", ", ".join(paths))

        # This is an intentional priority ordering
        return file_candidates + page_candidates

    def make_candidate_evaluator(
        self,
        project_name: str,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> CandidateEvaluator:
        """Create a CandidateEvaluator object to use."""
        candidate_prefs = self._candidate_prefs
        return CandidateEvaluator.create(
            project_name=project_name,
            target_python=self._target_python,
            prefer_binary=candidate_prefs.prefer_binary,
            allow_all_prereleases=candidate_prefs.allow_all_prereleases,
            specifier=specifier,
            hashes=hashes,
        )

    # NOTE(review): caching here assumes `specifier` and `hashes` arguments
    # are hashable; verified only for the call sites visible in this file.
    @functools.lru_cache(maxsize=None)
    def find_best_candidate(
        self,
        project_name: str,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> BestCandidateResult:
        """Find matches for the given project and specifier.

        :param specifier: An optional object implementing `filter`
            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
            versions.

        :return: A `BestCandidateResult` instance.
        """
        candidates = self.find_all_candidates(project_name)
        candidate_evaluator = self.make_candidate_evaluator(
            project_name=project_name,
            specifier=specifier,
            hashes=hashes,
        )
        return candidate_evaluator.compute_best_candidate(candidates)

    def find_requirement(
        self, req: InstallRequirement, upgrade: bool
    ) -> Optional[InstallationCandidate]:
        """Try to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a InstallationCandidate if found,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        """
        hashes = req.hashes(trust_internet=False)
        best_candidate_result = self.find_best_candidate(
            req.name,
            specifier=req.specifier,
            hashes=hashes,
        )
        best_candidate = best_candidate_result.best_candidate

        installed_version: Optional[_BaseVersion] = None
        if req.satisfied_by is not None:
            installed_version = req.satisfied_by.version

        def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
            # This repeated parse_version and str() conversion is needed to
            # handle different vendoring sources from pip and pkg_resources.
            # If we stop using the pkg_resources provided specifier and start
            # using our own, we can drop the cast to str().
            return (
                ", ".join(
                    sorted(
                        {str(c.version) for c in cand_iter},
                        key=parse_version,
                    )
                )
                or "none"
            )

        # Nothing installed and nothing found: report all seen versions.
        if installed_version is None and best_candidate is None:
            logger.critical(
                "Could not find a version that satisfies the requirement %s "
                "(from versions: %s)",
                req,
                _format_versions(best_candidate_result.all_candidates),
            )

            raise DistributionNotFound(f"No matching distribution found for {req}")

        def _should_install_candidate(
            candidate: Optional[InstallationCandidate],
        ) -> "TypeGuard[InstallationCandidate]":
            # True when the found candidate should be installed: either
            # nothing is installed yet, or the candidate is strictly newer.
            if installed_version is None:
                return True
            if best_candidate is None:
                return False
            return best_candidate.version > installed_version

        if not upgrade and installed_version is not None:
            if _should_install_candidate(best_candidate):
                logger.debug(
                    "Existing installed version (%s) satisfies requirement "
                    "(most up-to-date version is %s)",
                    installed_version,
                    best_candidate.version,
                )
            else:
                logger.debug(
                    "Existing installed version (%s) is most up-to-date and "
                    "satisfies requirement",
                    installed_version,
                )
            # Not upgrading: keep whatever is installed.
            return None

        if _should_install_candidate(best_candidate):
            logger.debug(
                "Using version %s (newest of versions: %s)",
                best_candidate.version,
                _format_versions(best_candidate_result.applicable_candidates),
            )
            return best_candidate

        # We have an existing version, and its the best version
        logger.debug(
            "Installed version (%s) is most up-to-date (past versions: %s)",
            installed_version,
            _format_versions(best_candidate_result.applicable_candidates),
        )
        raise BestVersionAlreadyInstalled
986
+
987
+
988
def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
    """Find the separator's index based on the package's canonical name.

    :param fragment: A <package>+<version> filename "fragment" (stem) or
        egg fragment.
    :param canonical_name: The package's canonical name.

    This function is needed since the canonicalized name does not necessarily
    have the same length as the egg info's name part. An example::

    >>> fragment = 'foo__bar-1.0'
    >>> canonical_name = 'foo-bar'
    >>> _find_name_version_sep(fragment, canonical_name)
    8
    """
    # Project name and version must be separated by one single dash. Check
    # every dash position: the first one whose prefix canonicalizes to the
    # expected name is the separator.
    for index, char in enumerate(fragment):
        if char != "-":
            continue
        if canonicalize_name(fragment[:index]) == canonical_name:
            return index
    raise ValueError(f"{fragment} does not match {canonical_name}")
1012
+
1013
+
1014
def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]:
    """Parse the version string from a <package>+<version> filename
    "fragment" (stem) or egg fragment.

    :param fragment: The string to parse. E.g. foo-2.1
    :param canonical_name: The canonicalized name of the package this
        belongs to.
    """
    try:
        sep_index = _find_name_version_sep(fragment, canonical_name)
    except ValueError:
        # No dash separates the canonical name from a version part.
        return None
    # Everything after the separator is the version; an empty remainder
    # means there was no version at all.
    return fragment[sep_index + 1 :] or None
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/index/sources.py ADDED
@@ -0,0 +1,284 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import mimetypes
3
+ import os
4
+ from collections import defaultdict
5
+ from typing import Callable, Dict, Iterable, List, Optional, Tuple
6
+
7
+ from pip._vendor.packaging.utils import (
8
+ InvalidSdistFilename,
9
+ InvalidWheelFilename,
10
+ canonicalize_name,
11
+ parse_sdist_filename,
12
+ parse_wheel_filename,
13
+ )
14
+
15
+ from pip._internal.models.candidate import InstallationCandidate
16
+ from pip._internal.models.link import Link
17
+ from pip._internal.utils.urls import path_to_url, url_to_path
18
+ from pip._internal.vcs import is_url
19
+
20
+ logger = logging.getLogger(__name__)
21
+
22
+ FoundCandidates = Iterable[InstallationCandidate]
23
+ FoundLinks = Iterable[Link]
24
+ CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
25
+ PageValidator = Callable[[Link], bool]
26
+
27
+
28
class LinkSource:
    """Abstract interface for one place candidate links can come from."""

    @property
    def link(self) -> Optional[Link]:
        """Return the underlying link, if there is one."""
        raise NotImplementedError()

    def page_candidates(self) -> FoundCandidates:
        """Candidates discovered by parsing an archive listing HTML file."""
        raise NotImplementedError()

    def file_links(self) -> FoundLinks:
        """Links obtained from archives that were specified directly."""
        raise NotImplementedError()
41
+
42
+
43
+ def _is_html_file(file_url: str) -> bool:
44
+ return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"
45
+
46
+
47
class _FlatDirectoryToUrls:
    """Scans a directory once, lazily, and caches the resulting URLs."""

    def __init__(self, path: str) -> None:
        self._path = path
        self._page_candidates: List[str] = []
        self._project_name_to_urls: Dict[str, List[str]] = defaultdict(list)
        self._scanned_directory = False

    def _scan_directory(self) -> None:
        """Populate both caches with a single pass over the directory."""
        for entry in os.scandir(self._path):
            entry_url = path_to_url(entry.path)
            if _is_html_file(entry_url):
                self._page_candidates.append(entry_url)
                continue

            # Only entries whose name parses as a wheel or sdist filename
            # are worth considering as packages.
            try:
                project_name = parse_wheel_filename(entry.name)[0]
            except InvalidWheelFilename:
                try:
                    project_name = parse_sdist_filename(entry.name)[0]
                except InvalidSdistFilename:
                    continue

            self._project_name_to_urls[project_name].append(entry_url)
        self._scanned_directory = True

    @property
    def page_candidates(self) -> List[str]:
        """URLs of HTML files found in the directory (scans on first use)."""
        if not self._scanned_directory:
            self._scan_directory()
        return self._page_candidates

    @property
    def project_name_to_urls(self) -> Dict[str, List[str]]:
        """Archive URLs keyed by project name (scans on first use)."""
        if not self._scanned_directory:
            self._scan_directory()
        return self._project_name_to_urls
92
+
93
+
94
class _FlatDirectorySource(LinkSource):
    """Link source specified by ``--find-links=<path-to-dir>``.

    This looks the content of the directory, and returns:

    * ``page_candidates``: Links listed on each HTML file in the directory.
    * ``file_candidates``: Archives in the directory.
    """

    # Class-level cache: each directory path is scanned at most once per
    # process, shared across all instances.
    _paths_to_urls: Dict[str, _FlatDirectoryToUrls] = {}

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        path: str,
        project_name: str,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._project_name = canonicalize_name(project_name)

        # Reuse the cached scanner for this path when one already exists.
        try:
            self._path_to_urls = self._paths_to_urls[path]
        except KeyError:
            self._path_to_urls = _FlatDirectoryToUrls(path=path)
            self._paths_to_urls[path] = self._path_to_urls

    @property
    def link(self) -> Optional[Link]:
        # A directory has no single underlying link.
        return None

    def page_candidates(self) -> FoundCandidates:
        for page_url in self._path_to_urls.page_candidates:
            yield from self._candidates_from_page(Link(page_url))

    def file_links(self) -> FoundLinks:
        for file_url in self._path_to_urls.project_name_to_urls[self._project_name]:
            yield Link(file_url)
132
+
133
+
134
class _LocalFileSource(LinkSource):
    """``--find-links=<path-or-url>`` or ``--[extra-]index-url=<path-or-url>``.

    If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
    the option, it is converted to a URL first. This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Only an HTML file can list further candidates.
        if _is_html_file(self._link.url):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # A non-HTML file is itself the candidate archive.
        if not _is_html_file(self._link.url):
            yield self._link
165
+
166
+
167
class _RemoteFileSource(LinkSource):
    """``--find-links=<url>`` or ``--[extra-]index-url=<url>``.

    This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        page_validator: PageValidator,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._page_validator = page_validator
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Pages rejected by the validator yield no candidates.
        if self._page_validator(self._link):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        yield self._link
197
+
198
+
199
class _IndexDirectorySource(LinkSource):
    """``--[extra-]index-url=<path-to-directory>``.

    This is treated like a remote URL; ``candidates_from_page`` contains logic
    for this by appending ``index.html`` to the link.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # An index directory never contributes direct file links.
        return ()
223
+
224
+
225
def build_source(
    location: str,
    *,
    candidates_from_page: CandidatesFromPage,
    page_validator: PageValidator,
    expand_dir: bool,
    cache_link_parsing: bool,
    project_name: str,
) -> Tuple[Optional[str], Optional[LinkSource]]:
    """Resolve *location* into an (url, LinkSource) pair.

    Returns ``(None, None)`` when the location is neither an existing path
    nor a recognizable URL, and ``(url, None)`` when it resolves to a local
    path that is neither a file nor a directory.
    """
    url: Optional[str] = None
    path: Optional[str] = None

    if os.path.exists(location):  # Is a local path.
        url = path_to_url(location)
        path = location
    elif location.startswith("file:"):  # A file: URL.
        url = location
        path = url_to_path(location)
    elif is_url(location):
        url = location

    if url is None:
        logger.warning(
            "Location '%s' is ignored: "
            "it is either a non-existing path or lacks a specific scheme.",
            location,
        )
        return (None, None)

    if path is None:
        # Purely remote location: fetch it as a page, subject to validation.
        remote_source: LinkSource = _RemoteFileSource(
            candidates_from_page=candidates_from_page,
            page_validator=page_validator,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, remote_source)

    if os.path.isdir(path):
        if expand_dir:
            # --find-links directory: enumerate its archives and HTML files.
            dir_source: LinkSource = _FlatDirectorySource(
                candidates_from_page=candidates_from_page,
                path=path,
                project_name=project_name,
            )
        else:
            # Index directory: treated like a remote index page.
            dir_source = _IndexDirectorySource(
                candidates_from_page=candidates_from_page,
                link=Link(url, cache_link_parsing=cache_link_parsing),
            )
        return (url, dir_source)

    if os.path.isfile(path):
        file_source: LinkSource = _LocalFileSource(
            candidates_from_page=candidates_from_page,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, file_source)

    logger.warning(
        "Location '%s' is ignored: it is neither a file nor a directory.",
        location,
    )
    return (url, None)
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/network/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ """Contains purely network-related utilities.
2
+ """
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/network/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (236 Bytes). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/network/__pycache__/auth.cpython-310.pyc ADDED
Binary file (14.5 kB). View file
 
mantis_evalkit/lib/python3.10/site-packages/pip/_internal/network/__pycache__/cache.cpython-310.pyc ADDED
Binary file (4.79 kB). View file