ZTWHHH commited on
Commit
5cccbcc
·
verified ·
1 Parent(s): 706ad82

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. llava/lib/python3.10/encodings/__pycache__/cp1251.cpython-310.pyc +0 -0
  2. llava/lib/python3.10/encodings/__pycache__/iso2022_jp.cpython-310.pyc +0 -0
  3. llava/lib/python3.10/encodings/__pycache__/iso8859_10.cpython-310.pyc +0 -0
  4. llava/lib/python3.10/encodings/__pycache__/iso8859_13.cpython-310.pyc +0 -0
  5. llava/lib/python3.10/encodings/__pycache__/iso8859_5.cpython-310.pyc +0 -0
  6. llava/lib/python3.10/encodings/__pycache__/koi8_t.cpython-310.pyc +0 -0
  7. llava/lib/python3.10/encodings/__pycache__/raw_unicode_escape.cpython-310.pyc +0 -0
  8. llava/lib/python3.10/encodings/__pycache__/utf_8_sig.cpython-310.pyc +0 -0
  9. llava/lib/python3.10/site-packages/pip/_internal/commands/__init__.py +132 -0
  10. llava/lib/python3.10/site-packages/pip/_internal/commands/cache.py +228 -0
  11. llava/lib/python3.10/site-packages/pip/_internal/commands/completion.py +130 -0
  12. llava/lib/python3.10/site-packages/pip/_internal/commands/configuration.py +280 -0
  13. llava/lib/python3.10/site-packages/pip/_internal/commands/download.py +146 -0
  14. llava/lib/python3.10/site-packages/pip/_internal/commands/hash.py +59 -0
  15. llava/lib/python3.10/site-packages/pip/_internal/commands/help.py +41 -0
  16. llava/lib/python3.10/site-packages/pip/_internal/commands/index.py +139 -0
  17. llava/lib/python3.10/site-packages/pip/_internal/commands/inspect.py +92 -0
  18. llava/lib/python3.10/site-packages/pip/_internal/commands/search.py +172 -0
  19. llava/lib/python3.10/site-packages/pip/_internal/commands/uninstall.py +114 -0
  20. llava/lib/python3.10/site-packages/pip/_internal/commands/wheel.py +182 -0
  21. llava/lib/python3.10/site-packages/pip/_internal/index/__init__.py +2 -0
  22. llava/lib/python3.10/site-packages/pip/_internal/index/__pycache__/__init__.cpython-310.pyc +0 -0
  23. llava/lib/python3.10/site-packages/pip/_internal/index/__pycache__/collector.cpython-310.pyc +0 -0
  24. llava/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc +0 -0
  25. llava/lib/python3.10/site-packages/pip/_internal/index/__pycache__/sources.cpython-310.pyc +0 -0
  26. llava/lib/python3.10/site-packages/pip/_internal/index/collector.py +494 -0
  27. llava/lib/python3.10/site-packages/pip/_internal/index/package_finder.py +1029 -0
  28. llava/lib/python3.10/site-packages/pip/_internal/index/sources.py +284 -0
  29. llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/__init__.cpython-310.pyc +0 -0
  30. llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/auth.cpython-310.pyc +0 -0
  31. llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/cache.cpython-310.pyc +0 -0
  32. llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/download.cpython-310.pyc +0 -0
  33. llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-310.pyc +0 -0
  34. llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/session.cpython-310.pyc +0 -0
  35. llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/utils.cpython-310.pyc +0 -0
  36. llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-310.pyc +0 -0
  37. llava/lib/python3.10/site-packages/pip/_internal/network/auth.py +566 -0
  38. llava/lib/python3.10/site-packages/pip/_internal/network/xmlrpc.py +62 -0
  39. minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/async_compat.cpython-310.pyc +0 -0
  40. minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/conftest_utils.cpython-310.pyc +0 -0
  41. minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/gcs_aio_client.cpython-310.pyc +0 -0
  42. minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/gcs_utils.cpython-310.pyc +0 -0
  43. minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/log.cpython-310.pyc +0 -0
  44. minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/metrics_agent.cpython-310.pyc +0 -0
  45. minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/protobuf_compat.cpython-310.pyc +0 -0
  46. minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/services.cpython-310.pyc +0 -0
  47. minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/state_api_test_utils.cpython-310.pyc +0 -0
  48. parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_amp_update_scale.h +44 -0
  49. parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_ctc_loss_ops.h +61 -0
  50. parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_remove_batch_dim_native.h +21 -0
llava/lib/python3.10/encodings/__pycache__/cp1251.cpython-310.pyc ADDED
Binary file (2.63 kB). View file
 
llava/lib/python3.10/encodings/__pycache__/iso2022_jp.cpython-310.pyc ADDED
Binary file (1.64 kB). View file
 
llava/lib/python3.10/encodings/__pycache__/iso8859_10.cpython-310.pyc ADDED
Binary file (2.61 kB). View file
 
llava/lib/python3.10/encodings/__pycache__/iso8859_13.cpython-310.pyc ADDED
Binary file (2.61 kB). View file
 
llava/lib/python3.10/encodings/__pycache__/iso8859_5.cpython-310.pyc ADDED
Binary file (2.6 kB). View file
 
llava/lib/python3.10/encodings/__pycache__/koi8_t.cpython-310.pyc ADDED
Binary file (2.31 kB). View file
 
llava/lib/python3.10/encodings/__pycache__/raw_unicode_escape.cpython-310.pyc ADDED
Binary file (2.21 kB). View file
 
llava/lib/python3.10/encodings/__pycache__/utf_8_sig.cpython-310.pyc ADDED
Binary file (4.65 kB). View file
 
llava/lib/python3.10/site-packages/pip/_internal/commands/__init__.py ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Package containing all pip commands
3
+ """
4
+
5
+ import importlib
6
+ from collections import namedtuple
7
+ from typing import Any, Dict, Optional
8
+
9
+ from pip._internal.cli.base_command import Command
10
+
11
+ CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")
12
+
13
+ # This dictionary does a bunch of heavy lifting for help output:
14
+ # - Enables avoiding additional (costly) imports for presenting `--help`.
15
+ # - The ordering matters for help display.
16
+ #
17
+ # Even though the module path starts with the same "pip._internal.commands"
18
+ # prefix, the full path makes testing easier (specifically when modifying
19
+ # `commands_dict` in test setup / teardown).
20
+ commands_dict: Dict[str, CommandInfo] = {
21
+ "install": CommandInfo(
22
+ "pip._internal.commands.install",
23
+ "InstallCommand",
24
+ "Install packages.",
25
+ ),
26
+ "download": CommandInfo(
27
+ "pip._internal.commands.download",
28
+ "DownloadCommand",
29
+ "Download packages.",
30
+ ),
31
+ "uninstall": CommandInfo(
32
+ "pip._internal.commands.uninstall",
33
+ "UninstallCommand",
34
+ "Uninstall packages.",
35
+ ),
36
+ "freeze": CommandInfo(
37
+ "pip._internal.commands.freeze",
38
+ "FreezeCommand",
39
+ "Output installed packages in requirements format.",
40
+ ),
41
+ "inspect": CommandInfo(
42
+ "pip._internal.commands.inspect",
43
+ "InspectCommand",
44
+ "Inspect the python environment.",
45
+ ),
46
+ "list": CommandInfo(
47
+ "pip._internal.commands.list",
48
+ "ListCommand",
49
+ "List installed packages.",
50
+ ),
51
+ "show": CommandInfo(
52
+ "pip._internal.commands.show",
53
+ "ShowCommand",
54
+ "Show information about installed packages.",
55
+ ),
56
+ "check": CommandInfo(
57
+ "pip._internal.commands.check",
58
+ "CheckCommand",
59
+ "Verify installed packages have compatible dependencies.",
60
+ ),
61
+ "config": CommandInfo(
62
+ "pip._internal.commands.configuration",
63
+ "ConfigurationCommand",
64
+ "Manage local and global configuration.",
65
+ ),
66
+ "search": CommandInfo(
67
+ "pip._internal.commands.search",
68
+ "SearchCommand",
69
+ "Search PyPI for packages.",
70
+ ),
71
+ "cache": CommandInfo(
72
+ "pip._internal.commands.cache",
73
+ "CacheCommand",
74
+ "Inspect and manage pip's wheel cache.",
75
+ ),
76
+ "index": CommandInfo(
77
+ "pip._internal.commands.index",
78
+ "IndexCommand",
79
+ "Inspect information available from package indexes.",
80
+ ),
81
+ "wheel": CommandInfo(
82
+ "pip._internal.commands.wheel",
83
+ "WheelCommand",
84
+ "Build wheels from your requirements.",
85
+ ),
86
+ "hash": CommandInfo(
87
+ "pip._internal.commands.hash",
88
+ "HashCommand",
89
+ "Compute hashes of package archives.",
90
+ ),
91
+ "completion": CommandInfo(
92
+ "pip._internal.commands.completion",
93
+ "CompletionCommand",
94
+ "A helper command used for command completion.",
95
+ ),
96
+ "debug": CommandInfo(
97
+ "pip._internal.commands.debug",
98
+ "DebugCommand",
99
+ "Show information useful for debugging.",
100
+ ),
101
+ "help": CommandInfo(
102
+ "pip._internal.commands.help",
103
+ "HelpCommand",
104
+ "Show help for commands.",
105
+ ),
106
+ }
107
+
108
+
109
+ def create_command(name: str, **kwargs: Any) -> Command:
110
+ """
111
+ Create an instance of the Command class with the given name.
112
+ """
113
+ module_path, class_name, summary = commands_dict[name]
114
+ module = importlib.import_module(module_path)
115
+ command_class = getattr(module, class_name)
116
+ command = command_class(name=name, summary=summary, **kwargs)
117
+
118
+ return command
119
+
120
+
121
+ def get_similar_commands(name: str) -> Optional[str]:
122
+ """Command name auto-correct."""
123
+ from difflib import get_close_matches
124
+
125
+ name = name.lower()
126
+
127
+ close_commands = get_close_matches(name, commands_dict.keys())
128
+
129
+ if close_commands:
130
+ return close_commands[0]
131
+ else:
132
+ return None
llava/lib/python3.10/site-packages/pip/_internal/commands/cache.py ADDED
@@ -0,0 +1,228 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import textwrap
3
+ from optparse import Values
4
+ from typing import Any, List
5
+
6
+ from pip._internal.cli.base_command import Command
7
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
8
+ from pip._internal.exceptions import CommandError, PipError
9
+ from pip._internal.utils import filesystem
10
+ from pip._internal.utils.logging import getLogger
11
+ from pip._internal.utils.misc import format_size
12
+
13
+ logger = getLogger(__name__)
14
+
15
+
16
+ class CacheCommand(Command):
17
+ """
18
+ Inspect and manage pip's wheel cache.
19
+
20
+ Subcommands:
21
+
22
+ - dir: Show the cache directory.
23
+ - info: Show information about the cache.
24
+ - list: List filenames of packages stored in the cache.
25
+ - remove: Remove one or more package from the cache.
26
+ - purge: Remove all items from the cache.
27
+
28
+ ``<pattern>`` can be a glob expression or a package name.
29
+ """
30
+
31
+ ignore_require_venv = True
32
+ usage = """
33
+ %prog dir
34
+ %prog info
35
+ %prog list [<pattern>] [--format=[human, abspath]]
36
+ %prog remove <pattern>
37
+ %prog purge
38
+ """
39
+
40
+ def add_options(self) -> None:
41
+ self.cmd_opts.add_option(
42
+ "--format",
43
+ action="store",
44
+ dest="list_format",
45
+ default="human",
46
+ choices=("human", "abspath"),
47
+ help="Select the output format among: human (default) or abspath",
48
+ )
49
+
50
+ self.parser.insert_option_group(0, self.cmd_opts)
51
+
52
+ def run(self, options: Values, args: List[str]) -> int:
53
+ handlers = {
54
+ "dir": self.get_cache_dir,
55
+ "info": self.get_cache_info,
56
+ "list": self.list_cache_items,
57
+ "remove": self.remove_cache_items,
58
+ "purge": self.purge_cache,
59
+ }
60
+
61
+ if not options.cache_dir:
62
+ logger.error("pip cache commands can not function since cache is disabled.")
63
+ return ERROR
64
+
65
+ # Determine action
66
+ if not args or args[0] not in handlers:
67
+ logger.error(
68
+ "Need an action (%s) to perform.",
69
+ ", ".join(sorted(handlers)),
70
+ )
71
+ return ERROR
72
+
73
+ action = args[0]
74
+
75
+ # Error handling happens here, not in the action-handlers.
76
+ try:
77
+ handlers[action](options, args[1:])
78
+ except PipError as e:
79
+ logger.error(e.args[0])
80
+ return ERROR
81
+
82
+ return SUCCESS
83
+
84
+ def get_cache_dir(self, options: Values, args: List[Any]) -> None:
85
+ if args:
86
+ raise CommandError("Too many arguments")
87
+
88
+ logger.info(options.cache_dir)
89
+
90
+ def get_cache_info(self, options: Values, args: List[Any]) -> None:
91
+ if args:
92
+ raise CommandError("Too many arguments")
93
+
94
+ num_http_files = len(self._find_http_files(options))
95
+ num_packages = len(self._find_wheels(options, "*"))
96
+
97
+ http_cache_location = self._cache_dir(options, "http-v2")
98
+ old_http_cache_location = self._cache_dir(options, "http")
99
+ wheels_cache_location = self._cache_dir(options, "wheels")
100
+ http_cache_size = filesystem.format_size(
101
+ filesystem.directory_size(http_cache_location)
102
+ + filesystem.directory_size(old_http_cache_location)
103
+ )
104
+ wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)
105
+
106
+ message = (
107
+ textwrap.dedent(
108
+ """
109
+ Package index page cache location (pip v23.3+): {http_cache_location}
110
+ Package index page cache location (older pips): {old_http_cache_location}
111
+ Package index page cache size: {http_cache_size}
112
+ Number of HTTP files: {num_http_files}
113
+ Locally built wheels location: {wheels_cache_location}
114
+ Locally built wheels size: {wheels_cache_size}
115
+ Number of locally built wheels: {package_count}
116
+ """ # noqa: E501
117
+ )
118
+ .format(
119
+ http_cache_location=http_cache_location,
120
+ old_http_cache_location=old_http_cache_location,
121
+ http_cache_size=http_cache_size,
122
+ num_http_files=num_http_files,
123
+ wheels_cache_location=wheels_cache_location,
124
+ package_count=num_packages,
125
+ wheels_cache_size=wheels_cache_size,
126
+ )
127
+ .strip()
128
+ )
129
+
130
+ logger.info(message)
131
+
132
+ def list_cache_items(self, options: Values, args: List[Any]) -> None:
133
+ if len(args) > 1:
134
+ raise CommandError("Too many arguments")
135
+
136
+ if args:
137
+ pattern = args[0]
138
+ else:
139
+ pattern = "*"
140
+
141
+ files = self._find_wheels(options, pattern)
142
+ if options.list_format == "human":
143
+ self.format_for_human(files)
144
+ else:
145
+ self.format_for_abspath(files)
146
+
147
+ def format_for_human(self, files: List[str]) -> None:
148
+ if not files:
149
+ logger.info("No locally built wheels cached.")
150
+ return
151
+
152
+ results = []
153
+ for filename in files:
154
+ wheel = os.path.basename(filename)
155
+ size = filesystem.format_file_size(filename)
156
+ results.append(f" - {wheel} ({size})")
157
+ logger.info("Cache contents:\n")
158
+ logger.info("\n".join(sorted(results)))
159
+
160
+ def format_for_abspath(self, files: List[str]) -> None:
161
+ if files:
162
+ logger.info("\n".join(sorted(files)))
163
+
164
+ def remove_cache_items(self, options: Values, args: List[Any]) -> None:
165
+ if len(args) > 1:
166
+ raise CommandError("Too many arguments")
167
+
168
+ if not args:
169
+ raise CommandError("Please provide a pattern")
170
+
171
+ files = self._find_wheels(options, args[0])
172
+
173
+ no_matching_msg = "No matching packages"
174
+ if args[0] == "*":
175
+ # Only fetch http files if no specific pattern given
176
+ files += self._find_http_files(options)
177
+ else:
178
+ # Add the pattern to the log message
179
+ no_matching_msg += f' for pattern "{args[0]}"'
180
+
181
+ if not files:
182
+ logger.warning(no_matching_msg)
183
+
184
+ bytes_removed = 0
185
+ for filename in files:
186
+ bytes_removed += os.stat(filename).st_size
187
+ os.unlink(filename)
188
+ logger.verbose("Removed %s", filename)
189
+ logger.info("Files removed: %s (%s)", len(files), format_size(bytes_removed))
190
+
191
+ def purge_cache(self, options: Values, args: List[Any]) -> None:
192
+ if args:
193
+ raise CommandError("Too many arguments")
194
+
195
+ return self.remove_cache_items(options, ["*"])
196
+
197
+ def _cache_dir(self, options: Values, subdir: str) -> str:
198
+ return os.path.join(options.cache_dir, subdir)
199
+
200
+ def _find_http_files(self, options: Values) -> List[str]:
201
+ old_http_dir = self._cache_dir(options, "http")
202
+ new_http_dir = self._cache_dir(options, "http-v2")
203
+ return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(
204
+ new_http_dir, "*"
205
+ )
206
+
207
+ def _find_wheels(self, options: Values, pattern: str) -> List[str]:
208
+ wheel_dir = self._cache_dir(options, "wheels")
209
+
210
+ # The wheel filename format, as specified in PEP 427, is:
211
+ # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
212
+ #
213
+ # Additionally, non-alphanumeric values in the distribution are
214
+ # normalized to underscores (_), meaning hyphens can never occur
215
+ # before `-{version}`.
216
+ #
217
+ # Given that information:
218
+ # - If the pattern we're given contains a hyphen (-), the user is
219
+ # providing at least the version. Thus, we can just append `*.whl`
220
+ # to match the rest of it.
221
+ # - If the pattern we're given doesn't contain a hyphen (-), the
222
+ # user is only providing the name. Thus, we append `-*.whl` to
223
+ # match the hyphen before the version, followed by anything else.
224
+ #
225
+ # PEP 427: https://www.python.org/dev/peps/pep-0427/
226
+ pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")
227
+
228
+ return filesystem.find_files(wheel_dir, pattern)
llava/lib/python3.10/site-packages/pip/_internal/commands/completion.py ADDED
@@ -0,0 +1,130 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ import textwrap
3
+ from optparse import Values
4
+ from typing import List
5
+
6
+ from pip._internal.cli.base_command import Command
7
+ from pip._internal.cli.status_codes import SUCCESS
8
+ from pip._internal.utils.misc import get_prog
9
+
10
+ BASE_COMPLETION = """
11
+ # pip {shell} completion start{script}# pip {shell} completion end
12
+ """
13
+
14
+ COMPLETION_SCRIPTS = {
15
+ "bash": """
16
+ _pip_completion()
17
+ {{
18
+ COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
19
+ COMP_CWORD=$COMP_CWORD \\
20
+ PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
21
+ }}
22
+ complete -o default -F _pip_completion {prog}
23
+ """,
24
+ "zsh": """
25
+ #compdef -P pip[0-9.]#
26
+ __pip() {{
27
+ compadd $( COMP_WORDS="$words[*]" \\
28
+ COMP_CWORD=$((CURRENT-1)) \\
29
+ PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
30
+ }}
31
+ if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
32
+ # autoload from fpath, call function directly
33
+ __pip "$@"
34
+ else
35
+ # eval/source/. command, register function for later
36
+ compdef __pip -P 'pip[0-9.]#'
37
+ fi
38
+ """,
39
+ "fish": """
40
+ function __fish_complete_pip
41
+ set -lx COMP_WORDS (commandline -o) ""
42
+ set -lx COMP_CWORD ( \\
43
+ math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
44
+ )
45
+ set -lx PIP_AUTO_COMPLETE 1
46
+ string split \\ -- (eval $COMP_WORDS[1])
47
+ end
48
+ complete -fa "(__fish_complete_pip)" -c {prog}
49
+ """,
50
+ "powershell": """
51
+ if ((Test-Path Function:\\TabExpansion) -and -not `
52
+ (Test-Path Function:\\_pip_completeBackup)) {{
53
+ Rename-Item Function:\\TabExpansion _pip_completeBackup
54
+ }}
55
+ function TabExpansion($line, $lastWord) {{
56
+ $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
57
+ if ($lastBlock.StartsWith("{prog} ")) {{
58
+ $Env:COMP_WORDS=$lastBlock
59
+ $Env:COMP_CWORD=$lastBlock.Split().Length - 1
60
+ $Env:PIP_AUTO_COMPLETE=1
61
+ (& {prog}).Split()
62
+ Remove-Item Env:COMP_WORDS
63
+ Remove-Item Env:COMP_CWORD
64
+ Remove-Item Env:PIP_AUTO_COMPLETE
65
+ }}
66
+ elseif (Test-Path Function:\\_pip_completeBackup) {{
67
+ # Fall back on existing tab expansion
68
+ _pip_completeBackup $line $lastWord
69
+ }}
70
+ }}
71
+ """,
72
+ }
73
+
74
+
75
+ class CompletionCommand(Command):
76
+ """A helper command to be used for command completion."""
77
+
78
+ ignore_require_venv = True
79
+
80
+ def add_options(self) -> None:
81
+ self.cmd_opts.add_option(
82
+ "--bash",
83
+ "-b",
84
+ action="store_const",
85
+ const="bash",
86
+ dest="shell",
87
+ help="Emit completion code for bash",
88
+ )
89
+ self.cmd_opts.add_option(
90
+ "--zsh",
91
+ "-z",
92
+ action="store_const",
93
+ const="zsh",
94
+ dest="shell",
95
+ help="Emit completion code for zsh",
96
+ )
97
+ self.cmd_opts.add_option(
98
+ "--fish",
99
+ "-f",
100
+ action="store_const",
101
+ const="fish",
102
+ dest="shell",
103
+ help="Emit completion code for fish",
104
+ )
105
+ self.cmd_opts.add_option(
106
+ "--powershell",
107
+ "-p",
108
+ action="store_const",
109
+ const="powershell",
110
+ dest="shell",
111
+ help="Emit completion code for powershell",
112
+ )
113
+
114
+ self.parser.insert_option_group(0, self.cmd_opts)
115
+
116
+ def run(self, options: Values, args: List[str]) -> int:
117
+ """Prints the completion code of the given shell"""
118
+ shells = COMPLETION_SCRIPTS.keys()
119
+ shell_options = ["--" + shell for shell in sorted(shells)]
120
+ if options.shell in shells:
121
+ script = textwrap.dedent(
122
+ COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
123
+ )
124
+ print(BASE_COMPLETION.format(script=script, shell=options.shell))
125
+ return SUCCESS
126
+ else:
127
+ sys.stderr.write(
128
+ "ERROR: You must pass {}\n".format(" or ".join(shell_options))
129
+ )
130
+ return SUCCESS
llava/lib/python3.10/site-packages/pip/_internal/commands/configuration.py ADDED
@@ -0,0 +1,280 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ import subprocess
4
+ from optparse import Values
5
+ from typing import Any, List, Optional
6
+
7
+ from pip._internal.cli.base_command import Command
8
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
9
+ from pip._internal.configuration import (
10
+ Configuration,
11
+ Kind,
12
+ get_configuration_files,
13
+ kinds,
14
+ )
15
+ from pip._internal.exceptions import PipError
16
+ from pip._internal.utils.logging import indent_log
17
+ from pip._internal.utils.misc import get_prog, write_output
18
+
19
+ logger = logging.getLogger(__name__)
20
+
21
+
22
+ class ConfigurationCommand(Command):
23
+ """
24
+ Manage local and global configuration.
25
+
26
+ Subcommands:
27
+
28
+ - list: List the active configuration (or from the file specified)
29
+ - edit: Edit the configuration file in an editor
30
+ - get: Get the value associated with command.option
31
+ - set: Set the command.option=value
32
+ - unset: Unset the value associated with command.option
33
+ - debug: List the configuration files and values defined under them
34
+
35
+ Configuration keys should be dot separated command and option name,
36
+ with the special prefix "global" affecting any command. For example,
37
+ "pip config set global.index-url https://example.org/" would configure
38
+ the index url for all commands, but "pip config set download.timeout 10"
39
+ would configure a 10 second timeout only for "pip download" commands.
40
+
41
+ If none of --user, --global and --site are passed, a virtual
42
+ environment configuration file is used if one is active and the file
43
+ exists. Otherwise, all modifications happen to the user file by
44
+ default.
45
+ """
46
+
47
+ ignore_require_venv = True
48
+ usage = """
49
+ %prog [<file-option>] list
50
+ %prog [<file-option>] [--editor <editor-path>] edit
51
+
52
+ %prog [<file-option>] get command.option
53
+ %prog [<file-option>] set command.option value
54
+ %prog [<file-option>] unset command.option
55
+ %prog [<file-option>] debug
56
+ """
57
+
58
+ def add_options(self) -> None:
59
+ self.cmd_opts.add_option(
60
+ "--editor",
61
+ dest="editor",
62
+ action="store",
63
+ default=None,
64
+ help=(
65
+ "Editor to use to edit the file. Uses VISUAL or EDITOR "
66
+ "environment variables if not provided."
67
+ ),
68
+ )
69
+
70
+ self.cmd_opts.add_option(
71
+ "--global",
72
+ dest="global_file",
73
+ action="store_true",
74
+ default=False,
75
+ help="Use the system-wide configuration file only",
76
+ )
77
+
78
+ self.cmd_opts.add_option(
79
+ "--user",
80
+ dest="user_file",
81
+ action="store_true",
82
+ default=False,
83
+ help="Use the user configuration file only",
84
+ )
85
+
86
+ self.cmd_opts.add_option(
87
+ "--site",
88
+ dest="site_file",
89
+ action="store_true",
90
+ default=False,
91
+ help="Use the current environment configuration file only",
92
+ )
93
+
94
+ self.parser.insert_option_group(0, self.cmd_opts)
95
+
96
+ def run(self, options: Values, args: List[str]) -> int:
97
+ handlers = {
98
+ "list": self.list_values,
99
+ "edit": self.open_in_editor,
100
+ "get": self.get_name,
101
+ "set": self.set_name_value,
102
+ "unset": self.unset_name,
103
+ "debug": self.list_config_values,
104
+ }
105
+
106
+ # Determine action
107
+ if not args or args[0] not in handlers:
108
+ logger.error(
109
+ "Need an action (%s) to perform.",
110
+ ", ".join(sorted(handlers)),
111
+ )
112
+ return ERROR
113
+
114
+ action = args[0]
115
+
116
+ # Determine which configuration files are to be loaded
117
+ # Depends on whether the command is modifying.
118
+ try:
119
+ load_only = self._determine_file(
120
+ options, need_value=(action in ["get", "set", "unset", "edit"])
121
+ )
122
+ except PipError as e:
123
+ logger.error(e.args[0])
124
+ return ERROR
125
+
126
+ # Load a new configuration
127
+ self.configuration = Configuration(
128
+ isolated=options.isolated_mode, load_only=load_only
129
+ )
130
+ self.configuration.load()
131
+
132
+ # Error handling happens here, not in the action-handlers.
133
+ try:
134
+ handlers[action](options, args[1:])
135
+ except PipError as e:
136
+ logger.error(e.args[0])
137
+ return ERROR
138
+
139
+ return SUCCESS
140
+
141
+ def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
142
+ file_options = [
143
+ key
144
+ for key, value in (
145
+ (kinds.USER, options.user_file),
146
+ (kinds.GLOBAL, options.global_file),
147
+ (kinds.SITE, options.site_file),
148
+ )
149
+ if value
150
+ ]
151
+
152
+ if not file_options:
153
+ if not need_value:
154
+ return None
155
+ # Default to user, unless there's a site file.
156
+ elif any(
157
+ os.path.exists(site_config_file)
158
+ for site_config_file in get_configuration_files()[kinds.SITE]
159
+ ):
160
+ return kinds.SITE
161
+ else:
162
+ return kinds.USER
163
+ elif len(file_options) == 1:
164
+ return file_options[0]
165
+
166
+ raise PipError(
167
+ "Need exactly one file to operate upon "
168
+ "(--user, --site, --global) to perform."
169
+ )
170
+
171
+ def list_values(self, options: Values, args: List[str]) -> None:
172
+ self._get_n_args(args, "list", n=0)
173
+
174
+ for key, value in sorted(self.configuration.items()):
175
+ write_output("%s=%r", key, value)
176
+
177
+ def get_name(self, options: Values, args: List[str]) -> None:
178
+ key = self._get_n_args(args, "get [name]", n=1)
179
+ value = self.configuration.get_value(key)
180
+
181
+ write_output("%s", value)
182
+
183
+ def set_name_value(self, options: Values, args: List[str]) -> None:
184
+ key, value = self._get_n_args(args, "set [name] [value]", n=2)
185
+ self.configuration.set_value(key, value)
186
+
187
+ self._save_configuration()
188
+
189
+ def unset_name(self, options: Values, args: List[str]) -> None:
190
+ key = self._get_n_args(args, "unset [name]", n=1)
191
+ self.configuration.unset_value(key)
192
+
193
+ self._save_configuration()
194
+
195
+ def list_config_values(self, options: Values, args: List[str]) -> None:
196
+ """List config key-value pairs across different config files"""
197
+ self._get_n_args(args, "debug", n=0)
198
+
199
+ self.print_env_var_values()
200
+ # Iterate over config files and print if they exist, and the
201
+ # key-value pairs present in them if they do
202
+ for variant, files in sorted(self.configuration.iter_config_files()):
203
+ write_output("%s:", variant)
204
+ for fname in files:
205
+ with indent_log():
206
+ file_exists = os.path.exists(fname)
207
+ write_output("%s, exists: %r", fname, file_exists)
208
+ if file_exists:
209
+ self.print_config_file_values(variant)
210
+
211
+ def print_config_file_values(self, variant: Kind) -> None:
212
+ """Get key-value pairs from the file of a variant"""
213
+ for name, value in self.configuration.get_values_in_config(variant).items():
214
+ with indent_log():
215
+ write_output("%s: %s", name, value)
216
+
217
+ def print_env_var_values(self) -> None:
218
+ """Get key-values pairs present as environment variables"""
219
+ write_output("%s:", "env_var")
220
+ with indent_log():
221
+ for key, value in sorted(self.configuration.get_environ_vars()):
222
+ env_var = f"PIP_{key.upper()}"
223
+ write_output("%s=%r", env_var, value)
224
+
225
+ def open_in_editor(self, options: Values, args: List[str]) -> None:
226
+ editor = self._determine_editor(options)
227
+
228
+ fname = self.configuration.get_file_to_edit()
229
+ if fname is None:
230
+ raise PipError("Could not determine appropriate file.")
231
+ elif '"' in fname:
232
+ # This shouldn't happen, unless we see a username like that.
233
+ # If that happens, we'd appreciate a pull request fixing this.
234
+ raise PipError(
235
+ f'Can not open an editor for a file name containing "\n{fname}'
236
+ )
237
+
238
+ try:
239
+ subprocess.check_call(f'{editor} "{fname}"', shell=True)
240
+ except FileNotFoundError as e:
241
+ if not e.filename:
242
+ e.filename = editor
243
+ raise
244
+ except subprocess.CalledProcessError as e:
245
+ raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")
246
+
247
+ def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
248
+ """Helper to make sure the command got the right number of arguments"""
249
+ if len(args) != n:
250
+ msg = (
251
+ f"Got unexpected number of arguments, expected {n}. "
252
+ f'(example: "{get_prog()} config {example}")'
253
+ )
254
+ raise PipError(msg)
255
+
256
+ if n == 1:
257
+ return args[0]
258
+ else:
259
+ return args
260
+
261
+ def _save_configuration(self) -> None:
262
+ # We successfully ran a modifying command. Need to save the
263
+ # configuration.
264
+ try:
265
+ self.configuration.save()
266
+ except Exception:
267
+ logger.exception(
268
+ "Unable to save configuration. Please report this as a bug."
269
+ )
270
+ raise PipError("Internal Error.")
271
+
272
+ def _determine_editor(self, options: Values) -> str:
273
+ if options.editor is not None:
274
+ return options.editor
275
+ elif "VISUAL" in os.environ:
276
+ return os.environ["VISUAL"]
277
+ elif "EDITOR" in os.environ:
278
+ return os.environ["EDITOR"]
279
+ else:
280
+ raise PipError("Could not determine editor to use.")
llava/lib/python3.10/site-packages/pip/_internal/commands/download.py ADDED
@@ -0,0 +1,146 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ from optparse import Values
4
+ from typing import List
5
+
6
+ from pip._internal.cli import cmdoptions
7
+ from pip._internal.cli.cmdoptions import make_target_python
8
+ from pip._internal.cli.req_command import RequirementCommand, with_cleanup
9
+ from pip._internal.cli.status_codes import SUCCESS
10
+ from pip._internal.operations.build.build_tracker import get_build_tracker
11
+ from pip._internal.req.req_install import check_legacy_setup_py_options
12
+ from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
13
+ from pip._internal.utils.temp_dir import TempDirectory
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
+ class DownloadCommand(RequirementCommand):
19
+ """
20
+ Download packages from:
21
+
22
+ - PyPI (and other indexes) using requirement specifiers.
23
+ - VCS project urls.
24
+ - Local project directories.
25
+ - Local or remote source archives.
26
+
27
+ pip also supports downloading from "requirements files", which provide
28
+ an easy way to specify a whole environment to be downloaded.
29
+ """
30
+
31
+ usage = """
32
+ %prog [options] <requirement specifier> [package-index-options] ...
33
+ %prog [options] -r <requirements file> [package-index-options] ...
34
+ %prog [options] <vcs project url> ...
35
+ %prog [options] <local project path> ...
36
+ %prog [options] <archive url/path> ..."""
37
+
38
+ def add_options(self) -> None:
39
+ self.cmd_opts.add_option(cmdoptions.constraints())
40
+ self.cmd_opts.add_option(cmdoptions.requirements())
41
+ self.cmd_opts.add_option(cmdoptions.no_deps())
42
+ self.cmd_opts.add_option(cmdoptions.global_options())
43
+ self.cmd_opts.add_option(cmdoptions.no_binary())
44
+ self.cmd_opts.add_option(cmdoptions.only_binary())
45
+ self.cmd_opts.add_option(cmdoptions.prefer_binary())
46
+ self.cmd_opts.add_option(cmdoptions.src())
47
+ self.cmd_opts.add_option(cmdoptions.pre())
48
+ self.cmd_opts.add_option(cmdoptions.require_hashes())
49
+ self.cmd_opts.add_option(cmdoptions.progress_bar())
50
+ self.cmd_opts.add_option(cmdoptions.no_build_isolation())
51
+ self.cmd_opts.add_option(cmdoptions.use_pep517())
52
+ self.cmd_opts.add_option(cmdoptions.no_use_pep517())
53
+ self.cmd_opts.add_option(cmdoptions.check_build_deps())
54
+ self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
55
+
56
+ self.cmd_opts.add_option(
57
+ "-d",
58
+ "--dest",
59
+ "--destination-dir",
60
+ "--destination-directory",
61
+ dest="download_dir",
62
+ metavar="dir",
63
+ default=os.curdir,
64
+ help="Download packages into <dir>.",
65
+ )
66
+
67
+ cmdoptions.add_target_python_options(self.cmd_opts)
68
+
69
+ index_opts = cmdoptions.make_option_group(
70
+ cmdoptions.index_group,
71
+ self.parser,
72
+ )
73
+
74
+ self.parser.insert_option_group(0, index_opts)
75
+ self.parser.insert_option_group(0, self.cmd_opts)
76
+
77
+ @with_cleanup
78
+ def run(self, options: Values, args: List[str]) -> int:
79
+ options.ignore_installed = True
80
+ # editable doesn't really make sense for `pip download`, but the bowels
81
+ # of the RequirementSet code require that property.
82
+ options.editables = []
83
+
84
+ cmdoptions.check_dist_restriction(options)
85
+
86
+ options.download_dir = normalize_path(options.download_dir)
87
+ ensure_dir(options.download_dir)
88
+
89
+ session = self.get_default_session(options)
90
+
91
+ target_python = make_target_python(options)
92
+ finder = self._build_package_finder(
93
+ options=options,
94
+ session=session,
95
+ target_python=target_python,
96
+ ignore_requires_python=options.ignore_requires_python,
97
+ )
98
+
99
+ build_tracker = self.enter_context(get_build_tracker())
100
+
101
+ directory = TempDirectory(
102
+ delete=not options.no_clean,
103
+ kind="download",
104
+ globally_managed=True,
105
+ )
106
+
107
+ reqs = self.get_requirements(args, options, finder, session)
108
+ check_legacy_setup_py_options(options, reqs)
109
+
110
+ preparer = self.make_requirement_preparer(
111
+ temp_build_dir=directory,
112
+ options=options,
113
+ build_tracker=build_tracker,
114
+ session=session,
115
+ finder=finder,
116
+ download_dir=options.download_dir,
117
+ use_user_site=False,
118
+ verbosity=self.verbosity,
119
+ )
120
+
121
+ resolver = self.make_resolver(
122
+ preparer=preparer,
123
+ finder=finder,
124
+ options=options,
125
+ ignore_requires_python=options.ignore_requires_python,
126
+ use_pep517=options.use_pep517,
127
+ py_version_info=options.python_version,
128
+ )
129
+
130
+ self.trace_basic_info(finder)
131
+
132
+ requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
133
+
134
+ downloaded: List[str] = []
135
+ for req in requirement_set.requirements.values():
136
+ if req.satisfied_by is None:
137
+ assert req.name is not None
138
+ preparer.save_linked_requirement(req)
139
+ downloaded.append(req.name)
140
+
141
+ preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
142
+
143
+ if downloaded:
144
+ write_output("Successfully downloaded %s", " ".join(downloaded))
145
+
146
+ return SUCCESS
llava/lib/python3.10/site-packages/pip/_internal/commands/hash.py ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import hashlib
2
+ import logging
3
+ import sys
4
+ from optparse import Values
5
+ from typing import List
6
+
7
+ from pip._internal.cli.base_command import Command
8
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
9
+ from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
10
+ from pip._internal.utils.misc import read_chunks, write_output
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
+ class HashCommand(Command):
16
+ """
17
+ Compute a hash of a local package archive.
18
+
19
+ These can be used with --hash in a requirements file to do repeatable
20
+ installs.
21
+ """
22
+
23
+ usage = "%prog [options] <file> ..."
24
+ ignore_require_venv = True
25
+
26
+ def add_options(self) -> None:
27
+ self.cmd_opts.add_option(
28
+ "-a",
29
+ "--algorithm",
30
+ dest="algorithm",
31
+ choices=STRONG_HASHES,
32
+ action="store",
33
+ default=FAVORITE_HASH,
34
+ help="The hash algorithm to use: one of {}".format(
35
+ ", ".join(STRONG_HASHES)
36
+ ),
37
+ )
38
+ self.parser.insert_option_group(0, self.cmd_opts)
39
+
40
+ def run(self, options: Values, args: List[str]) -> int:
41
+ if not args:
42
+ self.parser.print_usage(sys.stderr)
43
+ return ERROR
44
+
45
+ algorithm = options.algorithm
46
+ for path in args:
47
+ write_output(
48
+ "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)
49
+ )
50
+ return SUCCESS
51
+
52
+
53
+ def _hash_of_file(path: str, algorithm: str) -> str:
54
+ """Return the hash digest of a file."""
55
+ with open(path, "rb") as archive:
56
+ hash = hashlib.new(algorithm)
57
+ for chunk in read_chunks(archive):
58
+ hash.update(chunk)
59
+ return hash.hexdigest()
llava/lib/python3.10/site-packages/pip/_internal/commands/help.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from optparse import Values
2
+ from typing import List
3
+
4
+ from pip._internal.cli.base_command import Command
5
+ from pip._internal.cli.status_codes import SUCCESS
6
+ from pip._internal.exceptions import CommandError
7
+
8
+
9
+ class HelpCommand(Command):
10
+ """Show help for commands"""
11
+
12
+ usage = """
13
+ %prog <command>"""
14
+ ignore_require_venv = True
15
+
16
+ def run(self, options: Values, args: List[str]) -> int:
17
+ from pip._internal.commands import (
18
+ commands_dict,
19
+ create_command,
20
+ get_similar_commands,
21
+ )
22
+
23
+ try:
24
+ # 'pip help' with no args is handled by pip.__init__.parseopt()
25
+ cmd_name = args[0] # the command we need help for
26
+ except IndexError:
27
+ return SUCCESS
28
+
29
+ if cmd_name not in commands_dict:
30
+ guess = get_similar_commands(cmd_name)
31
+
32
+ msg = [f'unknown command "{cmd_name}"']
33
+ if guess:
34
+ msg.append(f'maybe you meant "{guess}"')
35
+
36
+ raise CommandError(" - ".join(msg))
37
+
38
+ command = create_command(cmd_name)
39
+ command.parser.print_help()
40
+
41
+ return SUCCESS
llava/lib/python3.10/site-packages/pip/_internal/commands/index.py ADDED
@@ -0,0 +1,139 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from optparse import Values
3
+ from typing import Any, Iterable, List, Optional
4
+
5
+ from pip._vendor.packaging.version import Version
6
+
7
+ from pip._internal.cli import cmdoptions
8
+ from pip._internal.cli.req_command import IndexGroupCommand
9
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
10
+ from pip._internal.commands.search import print_dist_installation_info
11
+ from pip._internal.exceptions import CommandError, DistributionNotFound, PipError
12
+ from pip._internal.index.collector import LinkCollector
13
+ from pip._internal.index.package_finder import PackageFinder
14
+ from pip._internal.models.selection_prefs import SelectionPreferences
15
+ from pip._internal.models.target_python import TargetPython
16
+ from pip._internal.network.session import PipSession
17
+ from pip._internal.utils.misc import write_output
18
+
19
+ logger = logging.getLogger(__name__)
20
+
21
+
22
+ class IndexCommand(IndexGroupCommand):
23
+ """
24
+ Inspect information available from package indexes.
25
+ """
26
+
27
+ ignore_require_venv = True
28
+ usage = """
29
+ %prog versions <package>
30
+ """
31
+
32
+ def add_options(self) -> None:
33
+ cmdoptions.add_target_python_options(self.cmd_opts)
34
+
35
+ self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
36
+ self.cmd_opts.add_option(cmdoptions.pre())
37
+ self.cmd_opts.add_option(cmdoptions.no_binary())
38
+ self.cmd_opts.add_option(cmdoptions.only_binary())
39
+
40
+ index_opts = cmdoptions.make_option_group(
41
+ cmdoptions.index_group,
42
+ self.parser,
43
+ )
44
+
45
+ self.parser.insert_option_group(0, index_opts)
46
+ self.parser.insert_option_group(0, self.cmd_opts)
47
+
48
+ def run(self, options: Values, args: List[str]) -> int:
49
+ handlers = {
50
+ "versions": self.get_available_package_versions,
51
+ }
52
+
53
+ logger.warning(
54
+ "pip index is currently an experimental command. "
55
+ "It may be removed/changed in a future release "
56
+ "without prior warning."
57
+ )
58
+
59
+ # Determine action
60
+ if not args or args[0] not in handlers:
61
+ logger.error(
62
+ "Need an action (%s) to perform.",
63
+ ", ".join(sorted(handlers)),
64
+ )
65
+ return ERROR
66
+
67
+ action = args[0]
68
+
69
+ # Error handling happens here, not in the action-handlers.
70
+ try:
71
+ handlers[action](options, args[1:])
72
+ except PipError as e:
73
+ logger.error(e.args[0])
74
+ return ERROR
75
+
76
+ return SUCCESS
77
+
78
+ def _build_package_finder(
79
+ self,
80
+ options: Values,
81
+ session: PipSession,
82
+ target_python: Optional[TargetPython] = None,
83
+ ignore_requires_python: Optional[bool] = None,
84
+ ) -> PackageFinder:
85
+ """
86
+ Create a package finder appropriate to the index command.
87
+ """
88
+ link_collector = LinkCollector.create(session, options=options)
89
+
90
+ # Pass allow_yanked=False to ignore yanked versions.
91
+ selection_prefs = SelectionPreferences(
92
+ allow_yanked=False,
93
+ allow_all_prereleases=options.pre,
94
+ ignore_requires_python=ignore_requires_python,
95
+ )
96
+
97
+ return PackageFinder.create(
98
+ link_collector=link_collector,
99
+ selection_prefs=selection_prefs,
100
+ target_python=target_python,
101
+ )
102
+
103
+ def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
104
+ if len(args) != 1:
105
+ raise CommandError("You need to specify exactly one argument")
106
+
107
+ target_python = cmdoptions.make_target_python(options)
108
+ query = args[0]
109
+
110
+ with self._build_session(options) as session:
111
+ finder = self._build_package_finder(
112
+ options=options,
113
+ session=session,
114
+ target_python=target_python,
115
+ ignore_requires_python=options.ignore_requires_python,
116
+ )
117
+
118
+ versions: Iterable[Version] = (
119
+ candidate.version for candidate in finder.find_all_candidates(query)
120
+ )
121
+
122
+ if not options.pre:
123
+ # Remove prereleases
124
+ versions = (
125
+ version for version in versions if not version.is_prerelease
126
+ )
127
+ versions = set(versions)
128
+
129
+ if not versions:
130
+ raise DistributionNotFound(
131
+ f"No matching distribution found for {query}"
132
+ )
133
+
134
+ formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
135
+ latest = formatted_versions[0]
136
+
137
+ write_output(f"{query} ({latest})")
138
+ write_output("Available versions: {}".format(", ".join(formatted_versions)))
139
+ print_dist_installation_info(query, latest)
llava/lib/python3.10/site-packages/pip/_internal/commands/inspect.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from optparse import Values
3
+ from typing import Any, Dict, List
4
+
5
+ from pip._vendor.packaging.markers import default_environment
6
+ from pip._vendor.rich import print_json
7
+
8
+ from pip import __version__
9
+ from pip._internal.cli import cmdoptions
10
+ from pip._internal.cli.base_command import Command
11
+ from pip._internal.cli.status_codes import SUCCESS
12
+ from pip._internal.metadata import BaseDistribution, get_environment
13
+ from pip._internal.utils.compat import stdlib_pkgs
14
+ from pip._internal.utils.urls import path_to_url
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
+
19
+ class InspectCommand(Command):
20
+ """
21
+ Inspect the content of a Python environment and produce a report in JSON format.
22
+ """
23
+
24
+ ignore_require_venv = True
25
+ usage = """
26
+ %prog [options]"""
27
+
28
+ def add_options(self) -> None:
29
+ self.cmd_opts.add_option(
30
+ "--local",
31
+ action="store_true",
32
+ default=False,
33
+ help=(
34
+ "If in a virtualenv that has global access, do not list "
35
+ "globally-installed packages."
36
+ ),
37
+ )
38
+ self.cmd_opts.add_option(
39
+ "--user",
40
+ dest="user",
41
+ action="store_true",
42
+ default=False,
43
+ help="Only output packages installed in user-site.",
44
+ )
45
+ self.cmd_opts.add_option(cmdoptions.list_path())
46
+ self.parser.insert_option_group(0, self.cmd_opts)
47
+
48
+ def run(self, options: Values, args: List[str]) -> int:
49
+ cmdoptions.check_list_path_option(options)
50
+ dists = get_environment(options.path).iter_installed_distributions(
51
+ local_only=options.local,
52
+ user_only=options.user,
53
+ skip=set(stdlib_pkgs),
54
+ )
55
+ output = {
56
+ "version": "1",
57
+ "pip_version": __version__,
58
+ "installed": [self._dist_to_dict(dist) for dist in dists],
59
+ "environment": default_environment(),
60
+ # TODO tags? scheme?
61
+ }
62
+ print_json(data=output)
63
+ return SUCCESS
64
+
65
+ def _dist_to_dict(self, dist: BaseDistribution) -> Dict[str, Any]:
66
+ res: Dict[str, Any] = {
67
+ "metadata": dist.metadata_dict,
68
+ "metadata_location": dist.info_location,
69
+ }
70
+ # direct_url. Note that we don't have download_info (as in the installation
71
+ # report) since it is not recorded in installed metadata.
72
+ direct_url = dist.direct_url
73
+ if direct_url is not None:
74
+ res["direct_url"] = direct_url.to_dict()
75
+ else:
76
+ # Emulate direct_url for legacy editable installs.
77
+ editable_project_location = dist.editable_project_location
78
+ if editable_project_location is not None:
79
+ res["direct_url"] = {
80
+ "url": path_to_url(editable_project_location),
81
+ "dir_info": {
82
+ "editable": True,
83
+ },
84
+ }
85
+ # installer
86
+ installer = dist.installer
87
+ if dist.installer:
88
+ res["installer"] = installer
89
+ # requested
90
+ if dist.installed_with_dist_info:
91
+ res["requested"] = dist.requested
92
+ return res
llava/lib/python3.10/site-packages/pip/_internal/commands/search.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import shutil
3
+ import sys
4
+ import textwrap
5
+ import xmlrpc.client
6
+ from collections import OrderedDict
7
+ from optparse import Values
8
+ from typing import TYPE_CHECKING, Dict, List, Optional, TypedDict
9
+
10
+ from pip._vendor.packaging.version import parse as parse_version
11
+
12
+ from pip._internal.cli.base_command import Command
13
+ from pip._internal.cli.req_command import SessionCommandMixin
14
+ from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
15
+ from pip._internal.exceptions import CommandError
16
+ from pip._internal.metadata import get_default_environment
17
+ from pip._internal.models.index import PyPI
18
+ from pip._internal.network.xmlrpc import PipXmlrpcTransport
19
+ from pip._internal.utils.logging import indent_log
20
+ from pip._internal.utils.misc import write_output
21
+
22
+ if TYPE_CHECKING:
23
+
24
+ class TransformedHit(TypedDict):
25
+ name: str
26
+ summary: str
27
+ versions: List[str]
28
+
29
+
30
+ logger = logging.getLogger(__name__)
31
+
32
+
33
+ class SearchCommand(Command, SessionCommandMixin):
34
+ """Search for PyPI packages whose name or summary contains <query>."""
35
+
36
+ usage = """
37
+ %prog [options] <query>"""
38
+ ignore_require_venv = True
39
+
40
+ def add_options(self) -> None:
41
+ self.cmd_opts.add_option(
42
+ "-i",
43
+ "--index",
44
+ dest="index",
45
+ metavar="URL",
46
+ default=PyPI.pypi_url,
47
+ help="Base URL of Python Package Index (default %default)",
48
+ )
49
+
50
+ self.parser.insert_option_group(0, self.cmd_opts)
51
+
52
+ def run(self, options: Values, args: List[str]) -> int:
53
+ if not args:
54
+ raise CommandError("Missing required argument (search query).")
55
+ query = args
56
+ pypi_hits = self.search(query, options)
57
+ hits = transform_hits(pypi_hits)
58
+
59
+ terminal_width = None
60
+ if sys.stdout.isatty():
61
+ terminal_width = shutil.get_terminal_size()[0]
62
+
63
+ print_results(hits, terminal_width=terminal_width)
64
+ if pypi_hits:
65
+ return SUCCESS
66
+ return NO_MATCHES_FOUND
67
+
68
+ def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
69
+ index_url = options.index
70
+
71
+ session = self.get_default_session(options)
72
+
73
+ transport = PipXmlrpcTransport(index_url, session)
74
+ pypi = xmlrpc.client.ServerProxy(index_url, transport)
75
+ try:
76
+ hits = pypi.search({"name": query, "summary": query}, "or")
77
+ except xmlrpc.client.Fault as fault:
78
+ message = (
79
+ f"XMLRPC request failed [code: {fault.faultCode}]\n{fault.faultString}"
80
+ )
81
+ raise CommandError(message)
82
+ assert isinstance(hits, list)
83
+ return hits
84
+
85
+
86
+ def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
87
+ """
88
+ The list from pypi is really a list of versions. We want a list of
89
+ packages with the list of versions stored inline. This converts the
90
+ list from pypi into one we can use.
91
+ """
92
+ packages: Dict[str, TransformedHit] = OrderedDict()
93
+ for hit in hits:
94
+ name = hit["name"]
95
+ summary = hit["summary"]
96
+ version = hit["version"]
97
+
98
+ if name not in packages.keys():
99
+ packages[name] = {
100
+ "name": name,
101
+ "summary": summary,
102
+ "versions": [version],
103
+ }
104
+ else:
105
+ packages[name]["versions"].append(version)
106
+
107
+ # if this is the highest version, replace summary and score
108
+ if version == highest_version(packages[name]["versions"]):
109
+ packages[name]["summary"] = summary
110
+
111
+ return list(packages.values())
112
+
113
+
114
+ def print_dist_installation_info(name: str, latest: str) -> None:
115
+ env = get_default_environment()
116
+ dist = env.get_distribution(name)
117
+ if dist is not None:
118
+ with indent_log():
119
+ if dist.version == latest:
120
+ write_output("INSTALLED: %s (latest)", dist.version)
121
+ else:
122
+ write_output("INSTALLED: %s", dist.version)
123
+ if parse_version(latest).pre:
124
+ write_output(
125
+ "LATEST: %s (pre-release; install"
126
+ " with `pip install --pre`)",
127
+ latest,
128
+ )
129
+ else:
130
+ write_output("LATEST: %s", latest)
131
+
132
+
133
+ def print_results(
134
+ hits: List["TransformedHit"],
135
+ name_column_width: Optional[int] = None,
136
+ terminal_width: Optional[int] = None,
137
+ ) -> None:
138
+ if not hits:
139
+ return
140
+ if name_column_width is None:
141
+ name_column_width = (
142
+ max(
143
+ [
144
+ len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
145
+ for hit in hits
146
+ ]
147
+ )
148
+ + 4
149
+ )
150
+
151
+ for hit in hits:
152
+ name = hit["name"]
153
+ summary = hit["summary"] or ""
154
+ latest = highest_version(hit.get("versions", ["-"]))
155
+ if terminal_width is not None:
156
+ target_width = terminal_width - name_column_width - 5
157
+ if target_width > 10:
158
+ # wrap and indent summary to fit terminal
159
+ summary_lines = textwrap.wrap(summary, target_width)
160
+ summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)
161
+
162
+ name_latest = f"{name} ({latest})"
163
+ line = f"{name_latest:{name_column_width}} - {summary}"
164
+ try:
165
+ write_output(line)
166
+ print_dist_installation_info(name, latest)
167
+ except UnicodeEncodeError:
168
+ pass
169
+
170
+
171
+ def highest_version(versions: List[str]) -> str:
172
+ return max(versions, key=parse_version)
llava/lib/python3.10/site-packages/pip/_internal/commands/uninstall.py ADDED
@@ -0,0 +1,114 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from optparse import Values
3
+ from typing import List
4
+
5
+ from pip._vendor.packaging.utils import canonicalize_name
6
+
7
+ from pip._internal.cli import cmdoptions
8
+ from pip._internal.cli.base_command import Command
9
+ from pip._internal.cli.index_command import SessionCommandMixin
10
+ from pip._internal.cli.status_codes import SUCCESS
11
+ from pip._internal.exceptions import InstallationError
12
+ from pip._internal.req import parse_requirements
13
+ from pip._internal.req.constructors import (
14
+ install_req_from_line,
15
+ install_req_from_parsed_requirement,
16
+ )
17
+ from pip._internal.utils.misc import (
18
+ check_externally_managed,
19
+ protect_pip_from_modification_on_windows,
20
+ warn_if_run_as_root,
21
+ )
22
+
23
+ logger = logging.getLogger(__name__)
24
+
25
+
26
+ class UninstallCommand(Command, SessionCommandMixin):
27
+ """
28
+ Uninstall packages.
29
+
30
+ pip is able to uninstall most installed packages. Known exceptions are:
31
+
32
+ - Pure distutils packages installed with ``python setup.py install``, which
33
+ leave behind no metadata to determine what files were installed.
34
+ - Script wrappers installed by ``python setup.py develop``.
35
+ """
36
+
37
+ usage = """
38
+ %prog [options] <package> ...
39
+ %prog [options] -r <requirements file> ..."""
40
+
41
+ def add_options(self) -> None:
42
+ self.cmd_opts.add_option(
43
+ "-r",
44
+ "--requirement",
45
+ dest="requirements",
46
+ action="append",
47
+ default=[],
48
+ metavar="file",
49
+ help=(
50
+ "Uninstall all the packages listed in the given requirements "
51
+ "file. This option can be used multiple times."
52
+ ),
53
+ )
54
+ self.cmd_opts.add_option(
55
+ "-y",
56
+ "--yes",
57
+ dest="yes",
58
+ action="store_true",
59
+ help="Don't ask for confirmation of uninstall deletions.",
60
+ )
61
+ self.cmd_opts.add_option(cmdoptions.root_user_action())
62
+ self.cmd_opts.add_option(cmdoptions.override_externally_managed())
63
+ self.parser.insert_option_group(0, self.cmd_opts)
64
+
65
+ def run(self, options: Values, args: List[str]) -> int:
66
+ session = self.get_default_session(options)
67
+
68
+ reqs_to_uninstall = {}
69
+ for name in args:
70
+ req = install_req_from_line(
71
+ name,
72
+ isolated=options.isolated_mode,
73
+ )
74
+ if req.name:
75
+ reqs_to_uninstall[canonicalize_name(req.name)] = req
76
+ else:
77
+ logger.warning(
78
+ "Invalid requirement: %r ignored -"
79
+ " the uninstall command expects named"
80
+ " requirements.",
81
+ name,
82
+ )
83
+ for filename in options.requirements:
84
+ for parsed_req in parse_requirements(
85
+ filename, options=options, session=session
86
+ ):
87
+ req = install_req_from_parsed_requirement(
88
+ parsed_req, isolated=options.isolated_mode
89
+ )
90
+ if req.name:
91
+ reqs_to_uninstall[canonicalize_name(req.name)] = req
92
+ if not reqs_to_uninstall:
93
+ raise InstallationError(
94
+ f"You must give at least one requirement to {self.name} (see "
95
+ f'"pip help {self.name}")'
96
+ )
97
+
98
+ if not options.override_externally_managed:
99
+ check_externally_managed()
100
+
101
+ protect_pip_from_modification_on_windows(
102
+ modifying_pip="pip" in reqs_to_uninstall
103
+ )
104
+
105
+ for req in reqs_to_uninstall.values():
106
+ uninstall_pathset = req.uninstall(
107
+ auto_confirm=options.yes,
108
+ verbose=self.verbosity > 0,
109
+ )
110
+ if uninstall_pathset:
111
+ uninstall_pathset.commit()
112
+ if options.root_user_action == "warn":
113
+ warn_if_run_as_root()
114
+ return SUCCESS
llava/lib/python3.10/site-packages/pip/_internal/commands/wheel.py ADDED
@@ -0,0 +1,182 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ import shutil
4
+ from optparse import Values
5
+ from typing import List
6
+
7
+ from pip._internal.cache import WheelCache
8
+ from pip._internal.cli import cmdoptions
9
+ from pip._internal.cli.req_command import RequirementCommand, with_cleanup
10
+ from pip._internal.cli.status_codes import SUCCESS
11
+ from pip._internal.exceptions import CommandError
12
+ from pip._internal.operations.build.build_tracker import get_build_tracker
13
+ from pip._internal.req.req_install import (
14
+ InstallRequirement,
15
+ check_legacy_setup_py_options,
16
+ )
17
+ from pip._internal.utils.misc import ensure_dir, normalize_path
18
+ from pip._internal.utils.temp_dir import TempDirectory
19
+ from pip._internal.wheel_builder import build, should_build_for_wheel_command
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+
24
+ class WheelCommand(RequirementCommand):
25
+ """
26
+ Build Wheel archives for your requirements and dependencies.
27
+
28
+ Wheel is a built-package format, and offers the advantage of not
29
+ recompiling your software during every install. For more details, see the
30
+ wheel docs: https://wheel.readthedocs.io/en/latest/
31
+
32
+ 'pip wheel' uses the build system interface as described here:
33
+ https://pip.pypa.io/en/stable/reference/build-system/
34
+
35
+ """
36
+
37
+ usage = """
38
+ %prog [options] <requirement specifier> ...
39
+ %prog [options] -r <requirements file> ...
40
+ %prog [options] [-e] <vcs project url> ...
41
+ %prog [options] [-e] <local project path> ...
42
+ %prog [options] <archive url/path> ..."""
43
+
44
+ def add_options(self) -> None:
45
+ self.cmd_opts.add_option(
46
+ "-w",
47
+ "--wheel-dir",
48
+ dest="wheel_dir",
49
+ metavar="dir",
50
+ default=os.curdir,
51
+ help=(
52
+ "Build wheels into <dir>, where the default is the "
53
+ "current working directory."
54
+ ),
55
+ )
56
+ self.cmd_opts.add_option(cmdoptions.no_binary())
57
+ self.cmd_opts.add_option(cmdoptions.only_binary())
58
+ self.cmd_opts.add_option(cmdoptions.prefer_binary())
59
+ self.cmd_opts.add_option(cmdoptions.no_build_isolation())
60
+ self.cmd_opts.add_option(cmdoptions.use_pep517())
61
+ self.cmd_opts.add_option(cmdoptions.no_use_pep517())
62
+ self.cmd_opts.add_option(cmdoptions.check_build_deps())
63
+ self.cmd_opts.add_option(cmdoptions.constraints())
64
+ self.cmd_opts.add_option(cmdoptions.editable())
65
+ self.cmd_opts.add_option(cmdoptions.requirements())
66
+ self.cmd_opts.add_option(cmdoptions.src())
67
+ self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
68
+ self.cmd_opts.add_option(cmdoptions.no_deps())
69
+ self.cmd_opts.add_option(cmdoptions.progress_bar())
70
+
71
+ self.cmd_opts.add_option(
72
+ "--no-verify",
73
+ dest="no_verify",
74
+ action="store_true",
75
+ default=False,
76
+ help="Don't verify if built wheel is valid.",
77
+ )
78
+
79
+ self.cmd_opts.add_option(cmdoptions.config_settings())
80
+ self.cmd_opts.add_option(cmdoptions.build_options())
81
+ self.cmd_opts.add_option(cmdoptions.global_options())
82
+
83
+ self.cmd_opts.add_option(
84
+ "--pre",
85
+ action="store_true",
86
+ default=False,
87
+ help=(
88
+ "Include pre-release and development versions. By default, "
89
+ "pip only finds stable versions."
90
+ ),
91
+ )
92
+
93
+ self.cmd_opts.add_option(cmdoptions.require_hashes())
94
+
95
+ index_opts = cmdoptions.make_option_group(
96
+ cmdoptions.index_group,
97
+ self.parser,
98
+ )
99
+
100
+ self.parser.insert_option_group(0, index_opts)
101
+ self.parser.insert_option_group(0, self.cmd_opts)
102
+
103
+ @with_cleanup
104
+ def run(self, options: Values, args: List[str]) -> int:
105
+ session = self.get_default_session(options)
106
+
107
+ finder = self._build_package_finder(options, session)
108
+
109
+ options.wheel_dir = normalize_path(options.wheel_dir)
110
+ ensure_dir(options.wheel_dir)
111
+
112
+ build_tracker = self.enter_context(get_build_tracker())
113
+
114
+ directory = TempDirectory(
115
+ delete=not options.no_clean,
116
+ kind="wheel",
117
+ globally_managed=True,
118
+ )
119
+
120
+ reqs = self.get_requirements(args, options, finder, session)
121
+ check_legacy_setup_py_options(options, reqs)
122
+
123
+ wheel_cache = WheelCache(options.cache_dir)
124
+
125
+ preparer = self.make_requirement_preparer(
126
+ temp_build_dir=directory,
127
+ options=options,
128
+ build_tracker=build_tracker,
129
+ session=session,
130
+ finder=finder,
131
+ download_dir=options.wheel_dir,
132
+ use_user_site=False,
133
+ verbosity=self.verbosity,
134
+ )
135
+
136
+ resolver = self.make_resolver(
137
+ preparer=preparer,
138
+ finder=finder,
139
+ options=options,
140
+ wheel_cache=wheel_cache,
141
+ ignore_requires_python=options.ignore_requires_python,
142
+ use_pep517=options.use_pep517,
143
+ )
144
+
145
+ self.trace_basic_info(finder)
146
+
147
+ requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
148
+
149
+ reqs_to_build: List[InstallRequirement] = []
150
+ for req in requirement_set.requirements.values():
151
+ if req.is_wheel:
152
+ preparer.save_linked_requirement(req)
153
+ elif should_build_for_wheel_command(req):
154
+ reqs_to_build.append(req)
155
+
156
+ preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
157
+
158
+ # build wheels
159
+ build_successes, build_failures = build(
160
+ reqs_to_build,
161
+ wheel_cache=wheel_cache,
162
+ verify=(not options.no_verify),
163
+ build_options=options.build_options or [],
164
+ global_options=options.global_options or [],
165
+ )
166
+ for req in build_successes:
167
+ assert req.link and req.link.is_wheel
168
+ assert req.local_file_path
169
+ # copy from cache to target directory
170
+ try:
171
+ shutil.copy(req.local_file_path, options.wheel_dir)
172
+ except OSError as e:
173
+ logger.warning(
174
+ "Building wheel for %s failed: %s",
175
+ req.name,
176
+ e,
177
+ )
178
+ build_failures.append(req)
179
+ if len(build_failures) != 0:
180
+ raise CommandError("Failed to build one or more wheels")
181
+
182
+ return SUCCESS
llava/lib/python3.10/site-packages/pip/_internal/index/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ """Index interaction code
2
+ """
llava/lib/python3.10/site-packages/pip/_internal/index/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (205 Bytes). View file
 
llava/lib/python3.10/site-packages/pip/_internal/index/__pycache__/collector.cpython-310.pyc ADDED
Binary file (15.1 kB). View file
 
llava/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc ADDED
Binary file (29.6 kB). View file
 
llava/lib/python3.10/site-packages/pip/_internal/index/__pycache__/sources.cpython-310.pyc ADDED
Binary file (8.86 kB). View file
 
llava/lib/python3.10/site-packages/pip/_internal/index/collector.py ADDED
@@ -0,0 +1,494 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ The main purpose of this module is to expose LinkCollector.collect_sources().
3
+ """
4
+
5
+ import collections
6
+ import email.message
7
+ import functools
8
+ import itertools
9
+ import json
10
+ import logging
11
+ import os
12
+ import urllib.parse
13
+ import urllib.request
14
+ from dataclasses import dataclass
15
+ from html.parser import HTMLParser
16
+ from optparse import Values
17
+ from typing import (
18
+ Callable,
19
+ Dict,
20
+ Iterable,
21
+ List,
22
+ MutableMapping,
23
+ NamedTuple,
24
+ Optional,
25
+ Protocol,
26
+ Sequence,
27
+ Tuple,
28
+ Union,
29
+ )
30
+
31
+ from pip._vendor import requests
32
+ from pip._vendor.requests import Response
33
+ from pip._vendor.requests.exceptions import RetryError, SSLError
34
+
35
+ from pip._internal.exceptions import NetworkConnectionError
36
+ from pip._internal.models.link import Link
37
+ from pip._internal.models.search_scope import SearchScope
38
+ from pip._internal.network.session import PipSession
39
+ from pip._internal.network.utils import raise_for_status
40
+ from pip._internal.utils.filetypes import is_archive_file
41
+ from pip._internal.utils.misc import redact_auth_from_url
42
+ from pip._internal.vcs import vcs
43
+
44
+ from .sources import CandidatesFromPage, LinkSource, build_source
45
+
46
+ logger = logging.getLogger(__name__)
47
+
48
+ ResponseHeaders = MutableMapping[str, str]
49
+
50
+
51
+ def _match_vcs_scheme(url: str) -> Optional[str]:
52
+ """Look for VCS schemes in the URL.
53
+
54
+ Returns the matched VCS scheme, or None if there's no match.
55
+ """
56
+ for scheme in vcs.schemes:
57
+ if url.lower().startswith(scheme) and url[len(scheme)] in "+:":
58
+ return scheme
59
+ return None
60
+
61
+
62
+ class _NotAPIContent(Exception):
63
+ def __init__(self, content_type: str, request_desc: str) -> None:
64
+ super().__init__(content_type, request_desc)
65
+ self.content_type = content_type
66
+ self.request_desc = request_desc
67
+
68
+
69
+ def _ensure_api_header(response: Response) -> None:
70
+ """
71
+ Check the Content-Type header to ensure the response contains a Simple
72
+ API Response.
73
+
74
+ Raises `_NotAPIContent` if the content type is not a valid content-type.
75
+ """
76
+ content_type = response.headers.get("Content-Type", "Unknown")
77
+
78
+ content_type_l = content_type.lower()
79
+ if content_type_l.startswith(
80
+ (
81
+ "text/html",
82
+ "application/vnd.pypi.simple.v1+html",
83
+ "application/vnd.pypi.simple.v1+json",
84
+ )
85
+ ):
86
+ return
87
+
88
+ raise _NotAPIContent(content_type, response.request.method)
89
+
90
+
91
+ class _NotHTTP(Exception):
92
+ pass
93
+
94
+
95
+ def _ensure_api_response(url: str, session: PipSession) -> None:
96
+ """
97
+ Send a HEAD request to the URL, and ensure the response contains a simple
98
+ API Response.
99
+
100
+ Raises `_NotHTTP` if the URL is not available for a HEAD request, or
101
+ `_NotAPIContent` if the content type is not a valid content type.
102
+ """
103
+ scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
104
+ if scheme not in {"http", "https"}:
105
+ raise _NotHTTP()
106
+
107
+ resp = session.head(url, allow_redirects=True)
108
+ raise_for_status(resp)
109
+
110
+ _ensure_api_header(resp)
111
+
112
+
113
+ def _get_simple_response(url: str, session: PipSession) -> Response:
114
+ """Access an Simple API response with GET, and return the response.
115
+
116
+ This consists of three parts:
117
+
118
+ 1. If the URL looks suspiciously like an archive, send a HEAD first to
119
+ check the Content-Type is HTML or Simple API, to avoid downloading a
120
+ large file. Raise `_NotHTTP` if the content type cannot be determined, or
121
+ `_NotAPIContent` if it is not HTML or a Simple API.
122
+ 2. Actually perform the request. Raise HTTP exceptions on network failures.
123
+ 3. Check the Content-Type header to make sure we got a Simple API response,
124
+ and raise `_NotAPIContent` otherwise.
125
+ """
126
+ if is_archive_file(Link(url).filename):
127
+ _ensure_api_response(url, session=session)
128
+
129
+ logger.debug("Getting page %s", redact_auth_from_url(url))
130
+
131
+ resp = session.get(
132
+ url,
133
+ headers={
134
+ "Accept": ", ".join(
135
+ [
136
+ "application/vnd.pypi.simple.v1+json",
137
+ "application/vnd.pypi.simple.v1+html; q=0.1",
138
+ "text/html; q=0.01",
139
+ ]
140
+ ),
141
+ # We don't want to blindly returned cached data for
142
+ # /simple/, because authors generally expecting that
143
+ # twine upload && pip install will function, but if
144
+ # they've done a pip install in the last ~10 minutes
145
+ # it won't. Thus by setting this to zero we will not
146
+ # blindly use any cached data, however the benefit of
147
+ # using max-age=0 instead of no-cache, is that we will
148
+ # still support conditional requests, so we will still
149
+ # minimize traffic sent in cases where the page hasn't
150
+ # changed at all, we will just always incur the round
151
+ # trip for the conditional GET now instead of only
152
+ # once per 10 minutes.
153
+ # For more information, please see pypa/pip#5670.
154
+ "Cache-Control": "max-age=0",
155
+ },
156
+ )
157
+ raise_for_status(resp)
158
+
159
+ # The check for archives above only works if the url ends with
160
+ # something that looks like an archive. However that is not a
161
+ # requirement of an url. Unless we issue a HEAD request on every
162
+ # url we cannot know ahead of time for sure if something is a
163
+ # Simple API response or not. However we can check after we've
164
+ # downloaded it.
165
+ _ensure_api_header(resp)
166
+
167
+ logger.debug(
168
+ "Fetched page %s as %s",
169
+ redact_auth_from_url(url),
170
+ resp.headers.get("Content-Type", "Unknown"),
171
+ )
172
+
173
+ return resp
174
+
175
+
176
+ def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
177
+ """Determine if we have any encoding information in our headers."""
178
+ if headers and "Content-Type" in headers:
179
+ m = email.message.Message()
180
+ m["content-type"] = headers["Content-Type"]
181
+ charset = m.get_param("charset")
182
+ if charset:
183
+ return str(charset)
184
+ return None
185
+
186
+
187
+ class CacheablePageContent:
188
+ def __init__(self, page: "IndexContent") -> None:
189
+ assert page.cache_link_parsing
190
+ self.page = page
191
+
192
+ def __eq__(self, other: object) -> bool:
193
+ return isinstance(other, type(self)) and self.page.url == other.page.url
194
+
195
+ def __hash__(self) -> int:
196
+ return hash(self.page.url)
197
+
198
+
199
+ class ParseLinks(Protocol):
200
+ def __call__(self, page: "IndexContent") -> Iterable[Link]: ...
201
+
202
+
203
+ def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
204
+ """
205
+ Given a function that parses an Iterable[Link] from an IndexContent, cache the
206
+ function's result (keyed by CacheablePageContent), unless the IndexContent
207
+ `page` has `page.cache_link_parsing == False`.
208
+ """
209
+
210
+ @functools.lru_cache(maxsize=None)
211
+ def wrapper(cacheable_page: CacheablePageContent) -> List[Link]:
212
+ return list(fn(cacheable_page.page))
213
+
214
+ @functools.wraps(fn)
215
+ def wrapper_wrapper(page: "IndexContent") -> List[Link]:
216
+ if page.cache_link_parsing:
217
+ return wrapper(CacheablePageContent(page))
218
+ return list(fn(page))
219
+
220
+ return wrapper_wrapper
221
+
222
+
223
+ @with_cached_index_content
224
+ def parse_links(page: "IndexContent") -> Iterable[Link]:
225
+ """
226
+ Parse a Simple API's Index Content, and yield its anchor elements as Link objects.
227
+ """
228
+
229
+ content_type_l = page.content_type.lower()
230
+ if content_type_l.startswith("application/vnd.pypi.simple.v1+json"):
231
+ data = json.loads(page.content)
232
+ for file in data.get("files", []):
233
+ link = Link.from_json(file, page.url)
234
+ if link is None:
235
+ continue
236
+ yield link
237
+ return
238
+
239
+ parser = HTMLLinkParser(page.url)
240
+ encoding = page.encoding or "utf-8"
241
+ parser.feed(page.content.decode(encoding))
242
+
243
+ url = page.url
244
+ base_url = parser.base_url or url
245
+ for anchor in parser.anchors:
246
+ link = Link.from_element(anchor, page_url=url, base_url=base_url)
247
+ if link is None:
248
+ continue
249
+ yield link
250
+
251
+
252
+ @dataclass(frozen=True)
253
+ class IndexContent:
254
+ """Represents one response (or page), along with its URL.
255
+
256
+ :param encoding: the encoding to decode the given content.
257
+ :param url: the URL from which the HTML was downloaded.
258
+ :param cache_link_parsing: whether links parsed from this page's url
259
+ should be cached. PyPI index urls should
260
+ have this set to False, for example.
261
+ """
262
+
263
+ content: bytes
264
+ content_type: str
265
+ encoding: Optional[str]
266
+ url: str
267
+ cache_link_parsing: bool = True
268
+
269
+ def __str__(self) -> str:
270
+ return redact_auth_from_url(self.url)
271
+
272
+
273
+ class HTMLLinkParser(HTMLParser):
274
+ """
275
+ HTMLParser that keeps the first base HREF and a list of all anchor
276
+ elements' attributes.
277
+ """
278
+
279
+ def __init__(self, url: str) -> None:
280
+ super().__init__(convert_charrefs=True)
281
+
282
+ self.url: str = url
283
+ self.base_url: Optional[str] = None
284
+ self.anchors: List[Dict[str, Optional[str]]] = []
285
+
286
+ def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
287
+ if tag == "base" and self.base_url is None:
288
+ href = self.get_href(attrs)
289
+ if href is not None:
290
+ self.base_url = href
291
+ elif tag == "a":
292
+ self.anchors.append(dict(attrs))
293
+
294
+ def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
295
+ for name, value in attrs:
296
+ if name == "href":
297
+ return value
298
+ return None
299
+
300
+
301
+ def _handle_get_simple_fail(
302
+ link: Link,
303
+ reason: Union[str, Exception],
304
+ meth: Optional[Callable[..., None]] = None,
305
+ ) -> None:
306
+ if meth is None:
307
+ meth = logger.debug
308
+ meth("Could not fetch URL %s: %s - skipping", link, reason)
309
+
310
+
311
+ def _make_index_content(
312
+ response: Response, cache_link_parsing: bool = True
313
+ ) -> IndexContent:
314
+ encoding = _get_encoding_from_headers(response.headers)
315
+ return IndexContent(
316
+ response.content,
317
+ response.headers["Content-Type"],
318
+ encoding=encoding,
319
+ url=response.url,
320
+ cache_link_parsing=cache_link_parsing,
321
+ )
322
+
323
+
324
+ def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]:
325
+ url = link.url.split("#", 1)[0]
326
+
327
+ # Check for VCS schemes that do not support lookup as web pages.
328
+ vcs_scheme = _match_vcs_scheme(url)
329
+ if vcs_scheme:
330
+ logger.warning(
331
+ "Cannot look at %s URL %s because it does not support lookup as web pages.",
332
+ vcs_scheme,
333
+ link,
334
+ )
335
+ return None
336
+
337
+ # Tack index.html onto file:// URLs that point to directories
338
+ scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
339
+ if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
340
+ # add trailing slash if not present so urljoin doesn't trim
341
+ # final segment
342
+ if not url.endswith("/"):
343
+ url += "/"
344
+ # TODO: In the future, it would be nice if pip supported PEP 691
345
+ # style responses in the file:// URLs, however there's no
346
+ # standard file extension for application/vnd.pypi.simple.v1+json
347
+ # so we'll need to come up with something on our own.
348
+ url = urllib.parse.urljoin(url, "index.html")
349
+ logger.debug(" file: URL is directory, getting %s", url)
350
+
351
+ try:
352
+ resp = _get_simple_response(url, session=session)
353
+ except _NotHTTP:
354
+ logger.warning(
355
+ "Skipping page %s because it looks like an archive, and cannot "
356
+ "be checked by a HTTP HEAD request.",
357
+ link,
358
+ )
359
+ except _NotAPIContent as exc:
360
+ logger.warning(
361
+ "Skipping page %s because the %s request got Content-Type: %s. "
362
+ "The only supported Content-Types are application/vnd.pypi.simple.v1+json, "
363
+ "application/vnd.pypi.simple.v1+html, and text/html",
364
+ link,
365
+ exc.request_desc,
366
+ exc.content_type,
367
+ )
368
+ except NetworkConnectionError as exc:
369
+ _handle_get_simple_fail(link, exc)
370
+ except RetryError as exc:
371
+ _handle_get_simple_fail(link, exc)
372
+ except SSLError as exc:
373
+ reason = "There was a problem confirming the ssl certificate: "
374
+ reason += str(exc)
375
+ _handle_get_simple_fail(link, reason, meth=logger.info)
376
+ except requests.ConnectionError as exc:
377
+ _handle_get_simple_fail(link, f"connection error: {exc}")
378
+ except requests.Timeout:
379
+ _handle_get_simple_fail(link, "timed out")
380
+ else:
381
+ return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing)
382
+ return None
383
+
384
+
385
+ class CollectedSources(NamedTuple):
386
+ find_links: Sequence[Optional[LinkSource]]
387
+ index_urls: Sequence[Optional[LinkSource]]
388
+
389
+
390
+ class LinkCollector:
391
+ """
392
+ Responsible for collecting Link objects from all configured locations,
393
+ making network requests as needed.
394
+
395
+ The class's main method is its collect_sources() method.
396
+ """
397
+
398
+ def __init__(
399
+ self,
400
+ session: PipSession,
401
+ search_scope: SearchScope,
402
+ ) -> None:
403
+ self.search_scope = search_scope
404
+ self.session = session
405
+
406
+ @classmethod
407
+ def create(
408
+ cls,
409
+ session: PipSession,
410
+ options: Values,
411
+ suppress_no_index: bool = False,
412
+ ) -> "LinkCollector":
413
+ """
414
+ :param session: The Session to use to make requests.
415
+ :param suppress_no_index: Whether to ignore the --no-index option
416
+ when constructing the SearchScope object.
417
+ """
418
+ index_urls = [options.index_url] + options.extra_index_urls
419
+ if options.no_index and not suppress_no_index:
420
+ logger.debug(
421
+ "Ignoring indexes: %s",
422
+ ",".join(redact_auth_from_url(url) for url in index_urls),
423
+ )
424
+ index_urls = []
425
+
426
+ # Make sure find_links is a list before passing to create().
427
+ find_links = options.find_links or []
428
+
429
+ search_scope = SearchScope.create(
430
+ find_links=find_links,
431
+ index_urls=index_urls,
432
+ no_index=options.no_index,
433
+ )
434
+ link_collector = LinkCollector(
435
+ session=session,
436
+ search_scope=search_scope,
437
+ )
438
+ return link_collector
439
+
440
+ @property
441
+ def find_links(self) -> List[str]:
442
+ return self.search_scope.find_links
443
+
444
+ def fetch_response(self, location: Link) -> Optional[IndexContent]:
445
+ """
446
+ Fetch an HTML page containing package links.
447
+ """
448
+ return _get_index_content(location, session=self.session)
449
+
450
+ def collect_sources(
451
+ self,
452
+ project_name: str,
453
+ candidates_from_page: CandidatesFromPage,
454
+ ) -> CollectedSources:
455
+ # The OrderedDict calls deduplicate sources by URL.
456
+ index_url_sources = collections.OrderedDict(
457
+ build_source(
458
+ loc,
459
+ candidates_from_page=candidates_from_page,
460
+ page_validator=self.session.is_secure_origin,
461
+ expand_dir=False,
462
+ cache_link_parsing=False,
463
+ project_name=project_name,
464
+ )
465
+ for loc in self.search_scope.get_index_urls_locations(project_name)
466
+ ).values()
467
+ find_links_sources = collections.OrderedDict(
468
+ build_source(
469
+ loc,
470
+ candidates_from_page=candidates_from_page,
471
+ page_validator=self.session.is_secure_origin,
472
+ expand_dir=True,
473
+ cache_link_parsing=True,
474
+ project_name=project_name,
475
+ )
476
+ for loc in self.find_links
477
+ ).values()
478
+
479
+ if logger.isEnabledFor(logging.DEBUG):
480
+ lines = [
481
+ f"* {s.link}"
482
+ for s in itertools.chain(find_links_sources, index_url_sources)
483
+ if s is not None and s.link is not None
484
+ ]
485
+ lines = [
486
+ f"{len(lines)} location(s) to search "
487
+ f"for versions of {project_name}:"
488
+ ] + lines
489
+ logger.debug("\n".join(lines))
490
+
491
+ return CollectedSources(
492
+ find_links=list(find_links_sources),
493
+ index_urls=list(index_url_sources),
494
+ )
llava/lib/python3.10/site-packages/pip/_internal/index/package_finder.py ADDED
@@ -0,0 +1,1029 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Routines related to PyPI, indexes"""
2
+
3
+ import enum
4
+ import functools
5
+ import itertools
6
+ import logging
7
+ import re
8
+ from dataclasses import dataclass
9
+ from typing import TYPE_CHECKING, FrozenSet, Iterable, List, Optional, Set, Tuple, Union
10
+
11
+ from pip._vendor.packaging import specifiers
12
+ from pip._vendor.packaging.tags import Tag
13
+ from pip._vendor.packaging.utils import canonicalize_name
14
+ from pip._vendor.packaging.version import InvalidVersion, _BaseVersion
15
+ from pip._vendor.packaging.version import parse as parse_version
16
+
17
+ from pip._internal.exceptions import (
18
+ BestVersionAlreadyInstalled,
19
+ DistributionNotFound,
20
+ InvalidWheelFilename,
21
+ UnsupportedWheel,
22
+ )
23
+ from pip._internal.index.collector import LinkCollector, parse_links
24
+ from pip._internal.models.candidate import InstallationCandidate
25
+ from pip._internal.models.format_control import FormatControl
26
+ from pip._internal.models.link import Link
27
+ from pip._internal.models.search_scope import SearchScope
28
+ from pip._internal.models.selection_prefs import SelectionPreferences
29
+ from pip._internal.models.target_python import TargetPython
30
+ from pip._internal.models.wheel import Wheel
31
+ from pip._internal.req import InstallRequirement
32
+ from pip._internal.utils._log import getLogger
33
+ from pip._internal.utils.filetypes import WHEEL_EXTENSION
34
+ from pip._internal.utils.hashes import Hashes
35
+ from pip._internal.utils.logging import indent_log
36
+ from pip._internal.utils.misc import build_netloc
37
+ from pip._internal.utils.packaging import check_requires_python
38
+ from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
39
+
40
+ if TYPE_CHECKING:
41
+ from pip._vendor.typing_extensions import TypeGuard
42
+
43
+ __all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]
44
+
45
+
46
+ logger = getLogger(__name__)
47
+
48
+ BuildTag = Union[Tuple[()], Tuple[int, str]]
49
+ CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]
50
+
51
+
52
+ def _check_link_requires_python(
53
+ link: Link,
54
+ version_info: Tuple[int, int, int],
55
+ ignore_requires_python: bool = False,
56
+ ) -> bool:
57
+ """
58
+ Return whether the given Python version is compatible with a link's
59
+ "Requires-Python" value.
60
+
61
+ :param version_info: A 3-tuple of ints representing the Python
62
+ major-minor-micro version to check.
63
+ :param ignore_requires_python: Whether to ignore the "Requires-Python"
64
+ value if the given Python version isn't compatible.
65
+ """
66
+ try:
67
+ is_compatible = check_requires_python(
68
+ link.requires_python,
69
+ version_info=version_info,
70
+ )
71
+ except specifiers.InvalidSpecifier:
72
+ logger.debug(
73
+ "Ignoring invalid Requires-Python (%r) for link: %s",
74
+ link.requires_python,
75
+ link,
76
+ )
77
+ else:
78
+ if not is_compatible:
79
+ version = ".".join(map(str, version_info))
80
+ if not ignore_requires_python:
81
+ logger.verbose(
82
+ "Link requires a different Python (%s not in: %r): %s",
83
+ version,
84
+ link.requires_python,
85
+ link,
86
+ )
87
+ return False
88
+
89
+ logger.debug(
90
+ "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",
91
+ version,
92
+ link.requires_python,
93
+ link,
94
+ )
95
+
96
+ return True
97
+
98
+
99
+ class LinkType(enum.Enum):
100
+ candidate = enum.auto()
101
+ different_project = enum.auto()
102
+ yanked = enum.auto()
103
+ format_unsupported = enum.auto()
104
+ format_invalid = enum.auto()
105
+ platform_mismatch = enum.auto()
106
+ requires_python_mismatch = enum.auto()
107
+
108
+
109
+ class LinkEvaluator:
110
+ """
111
+ Responsible for evaluating links for a particular project.
112
+ """
113
+
114
+ _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")
115
+
116
+ # Don't include an allow_yanked default value to make sure each call
117
+ # site considers whether yanked releases are allowed. This also causes
118
+ # that decision to be made explicit in the calling code, which helps
119
+ # people when reading the code.
120
+ def __init__(
121
+ self,
122
+ project_name: str,
123
+ canonical_name: str,
124
+ formats: FrozenSet[str],
125
+ target_python: TargetPython,
126
+ allow_yanked: bool,
127
+ ignore_requires_python: Optional[bool] = None,
128
+ ) -> None:
129
+ """
130
+ :param project_name: The user supplied package name.
131
+ :param canonical_name: The canonical package name.
132
+ :param formats: The formats allowed for this package. Should be a set
133
+ with 'binary' or 'source' or both in it.
134
+ :param target_python: The target Python interpreter to use when
135
+ evaluating link compatibility. This is used, for example, to
136
+ check wheel compatibility, as well as when checking the Python
137
+ version, e.g. the Python version embedded in a link filename
138
+ (or egg fragment) and against an HTML link's optional PEP 503
139
+ "data-requires-python" attribute.
140
+ :param allow_yanked: Whether files marked as yanked (in the sense
141
+ of PEP 592) are permitted to be candidates for install.
142
+ :param ignore_requires_python: Whether to ignore incompatible
143
+ PEP 503 "data-requires-python" values in HTML links. Defaults
144
+ to False.
145
+ """
146
+ if ignore_requires_python is None:
147
+ ignore_requires_python = False
148
+
149
+ self._allow_yanked = allow_yanked
150
+ self._canonical_name = canonical_name
151
+ self._ignore_requires_python = ignore_requires_python
152
+ self._formats = formats
153
+ self._target_python = target_python
154
+
155
+ self.project_name = project_name
156
+
157
+ def evaluate_link(self, link: Link) -> Tuple[LinkType, str]:
158
+ """
159
+ Determine whether a link is a candidate for installation.
160
+
161
+ :return: A tuple (result, detail), where *result* is an enum
162
+ representing whether the evaluation found a candidate, or the reason
163
+ why one is not found. If a candidate is found, *detail* will be the
164
+ candidate's version string; if one is not found, it contains the
165
+ reason the link fails to qualify.
166
+ """
167
+ version = None
168
+ if link.is_yanked and not self._allow_yanked:
169
+ reason = link.yanked_reason or "<none given>"
170
+ return (LinkType.yanked, f"yanked for reason: {reason}")
171
+
172
+ if link.egg_fragment:
173
+ egg_info = link.egg_fragment
174
+ ext = link.ext
175
+ else:
176
+ egg_info, ext = link.splitext()
177
+ if not ext:
178
+ return (LinkType.format_unsupported, "not a file")
179
+ if ext not in SUPPORTED_EXTENSIONS:
180
+ return (
181
+ LinkType.format_unsupported,
182
+ f"unsupported archive format: {ext}",
183
+ )
184
+ if "binary" not in self._formats and ext == WHEEL_EXTENSION:
185
+ reason = f"No binaries permitted for {self.project_name}"
186
+ return (LinkType.format_unsupported, reason)
187
+ if "macosx10" in link.path and ext == ".zip":
188
+ return (LinkType.format_unsupported, "macosx10 one")
189
+ if ext == WHEEL_EXTENSION:
190
+ try:
191
+ wheel = Wheel(link.filename)
192
+ except InvalidWheelFilename:
193
+ return (
194
+ LinkType.format_invalid,
195
+ "invalid wheel filename",
196
+ )
197
+ if canonicalize_name(wheel.name) != self._canonical_name:
198
+ reason = f"wrong project name (not {self.project_name})"
199
+ return (LinkType.different_project, reason)
200
+
201
+ supported_tags = self._target_python.get_unsorted_tags()
202
+ if not wheel.supported(supported_tags):
203
+ # Include the wheel's tags in the reason string to
204
+ # simplify troubleshooting compatibility issues.
205
+ file_tags = ", ".join(wheel.get_formatted_file_tags())
206
+ reason = (
207
+ f"none of the wheel's tags ({file_tags}) are compatible "
208
+ f"(run pip debug --verbose to show compatible tags)"
209
+ )
210
+ return (LinkType.platform_mismatch, reason)
211
+
212
+ version = wheel.version
213
+
214
+ # This should be up by the self.ok_binary check, but see issue 2700.
215
+ if "source" not in self._formats and ext != WHEEL_EXTENSION:
216
+ reason = f"No sources permitted for {self.project_name}"
217
+ return (LinkType.format_unsupported, reason)
218
+
219
+ if not version:
220
+ version = _extract_version_from_fragment(
221
+ egg_info,
222
+ self._canonical_name,
223
+ )
224
+ if not version:
225
+ reason = f"Missing project version for {self.project_name}"
226
+ return (LinkType.format_invalid, reason)
227
+
228
+ match = self._py_version_re.search(version)
229
+ if match:
230
+ version = version[: match.start()]
231
+ py_version = match.group(1)
232
+ if py_version != self._target_python.py_version:
233
+ return (
234
+ LinkType.platform_mismatch,
235
+ "Python version is incorrect",
236
+ )
237
+
238
+ supports_python = _check_link_requires_python(
239
+ link,
240
+ version_info=self._target_python.py_version_info,
241
+ ignore_requires_python=self._ignore_requires_python,
242
+ )
243
+ if not supports_python:
244
+ reason = f"{version} Requires-Python {link.requires_python}"
245
+ return (LinkType.requires_python_mismatch, reason)
246
+
247
+ logger.debug("Found link %s, version: %s", link, version)
248
+
249
+ return (LinkType.candidate, version)
250
+
251
+
252
+ def filter_unallowed_hashes(
253
+ candidates: List[InstallationCandidate],
254
+ hashes: Optional[Hashes],
255
+ project_name: str,
256
+ ) -> List[InstallationCandidate]:
257
+ """
258
+ Filter out candidates whose hashes aren't allowed, and return a new
259
+ list of candidates.
260
+
261
+ If at least one candidate has an allowed hash, then all candidates with
262
+ either an allowed hash or no hash specified are returned. Otherwise,
263
+ the given candidates are returned.
264
+
265
+ Including the candidates with no hash specified when there is a match
266
+ allows a warning to be logged if there is a more preferred candidate
267
+ with no hash specified. Returning all candidates in the case of no
268
+ matches lets pip report the hash of the candidate that would otherwise
269
+ have been installed (e.g. permitting the user to more easily update
270
+ their requirements file with the desired hash).
271
+ """
272
+ if not hashes:
273
+ logger.debug(
274
+ "Given no hashes to check %s links for project %r: "
275
+ "discarding no candidates",
276
+ len(candidates),
277
+ project_name,
278
+ )
279
+ # Make sure we're not returning back the given value.
280
+ return list(candidates)
281
+
282
+ matches_or_no_digest = []
283
+ # Collect the non-matches for logging purposes.
284
+ non_matches = []
285
+ match_count = 0
286
+ for candidate in candidates:
287
+ link = candidate.link
288
+ if not link.has_hash:
289
+ pass
290
+ elif link.is_hash_allowed(hashes=hashes):
291
+ match_count += 1
292
+ else:
293
+ non_matches.append(candidate)
294
+ continue
295
+
296
+ matches_or_no_digest.append(candidate)
297
+
298
+ if match_count:
299
+ filtered = matches_or_no_digest
300
+ else:
301
+ # Make sure we're not returning back the given value.
302
+ filtered = list(candidates)
303
+
304
+ if len(filtered) == len(candidates):
305
+ discard_message = "discarding no candidates"
306
+ else:
307
+ discard_message = "discarding {} non-matches:\n {}".format(
308
+ len(non_matches),
309
+ "\n ".join(str(candidate.link) for candidate in non_matches),
310
+ )
311
+
312
+ logger.debug(
313
+ "Checked %s links for project %r against %s hashes "
314
+ "(%s matches, %s no digest): %s",
315
+ len(candidates),
316
+ project_name,
317
+ hashes.digest_count,
318
+ match_count,
319
+ len(matches_or_no_digest) - match_count,
320
+ discard_message,
321
+ )
322
+
323
+ return filtered
324
+
325
+
326
+ @dataclass
327
+ class CandidatePreferences:
328
+ """
329
+ Encapsulates some of the preferences for filtering and sorting
330
+ InstallationCandidate objects.
331
+ """
332
+
333
+ prefer_binary: bool = False
334
+ allow_all_prereleases: bool = False
335
+
336
+
337
@dataclass(frozen=True)
class BestCandidateResult:
    """A collection of candidates, returned by `PackageFinder.find_best_candidate`.

    This class is only intended to be instantiated by CandidateEvaluator's
    `compute_best_candidate()` method.

    :param all_candidates: A sequence of all available candidates found.
    :param applicable_candidates: The applicable candidates.
    :param best_candidate: The most preferred candidate found, or None
        if no applicable candidates were found.
    """

    all_candidates: List[InstallationCandidate]
    applicable_candidates: List[InstallationCandidate]
    best_candidate: Optional[InstallationCandidate]

    def __post_init__(self) -> None:
        # Applicable candidates must be a subset of everything found.
        assert set(self.applicable_candidates) <= set(self.all_candidates)

        if self.best_candidate is not None:
            # A chosen best candidate has to come from the applicable set.
            assert self.best_candidate in self.applicable_candidates
        else:
            # No best candidate is only valid when nothing was applicable.
            assert not self.applicable_candidates
361
+
362
+
363
class CandidateEvaluator:
    """
    Responsible for filtering and sorting candidates for installation based
    on what tags are valid.
    """

    @classmethod
    def create(
        cls,
        project_name: str,
        target_python: Optional[TargetPython] = None,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> "CandidateEvaluator":
        """Create a CandidateEvaluator object.

        :param project_name: The name of the project whose candidates are
            being evaluated.
        :param target_python: The target Python interpreter to use when
            checking compatibility. If None (the default), a TargetPython
            object will be constructed from the running Python.
        :param prefer_binary: Whether to sort all wheels ahead of source
            distributions regardless of version (see _sort_key).
        :param allow_all_prereleases: Whether pre-release versions are
            applicable even when the specifier does not request them.
        :param specifier: An optional object implementing `filter`
            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
            versions.
        :param hashes: An optional collection of allowed hashes.
        """
        if target_python is None:
            target_python = TargetPython()
        if specifier is None:
            # An empty SpecifierSet matches every version.
            specifier = specifiers.SpecifierSet()

        supported_tags = target_python.get_sorted_tags()

        return cls(
            project_name=project_name,
            supported_tags=supported_tags,
            specifier=specifier,
            prefer_binary=prefer_binary,
            allow_all_prereleases=allow_all_prereleases,
            hashes=hashes,
        )

    def __init__(
        self,
        project_name: str,
        supported_tags: List[Tag],
        specifier: specifiers.BaseSpecifier,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
        hashes: Optional[Hashes] = None,
    ) -> None:
        """
        :param supported_tags: The PEP 425 tags supported by the target
            Python in order of preference (most preferred first).
        """
        self._allow_all_prereleases = allow_all_prereleases
        self._hashes = hashes
        self._prefer_binary = prefer_binary
        self._project_name = project_name
        self._specifier = specifier
        self._supported_tags = supported_tags
        # Since the index of the tag in the _supported_tags list is used
        # as a priority, precompute a map from tag to index/priority to be
        # used in wheel.find_most_preferred_tag.
        self._wheel_tag_preferences = {
            tag: idx for idx, tag in enumerate(supported_tags)
        }

    def get_applicable_candidates(
        self,
        candidates: List[InstallationCandidate],
    ) -> List[InstallationCandidate]:
        """
        Return the applicable candidates from a list of candidates.

        Candidates are filtered by the version specifier and by allowed
        hashes (see filter_unallowed_hashes), then sorted ascending by
        _sort_key, i.e. least preferred first.
        """
        # Using None infers from the specifier instead.
        allow_prereleases = self._allow_all_prereleases or None
        specifier = self._specifier

        # We turn the version object into a str here because otherwise
        # when we're debundled but setuptools isn't, Python will see
        # packaging.version.Version and
        # pkg_resources._vendor.packaging.version.Version as different
        # types. This way we'll use a str as a common data interchange
        # format. If we stop using the pkg_resources provided specifier
        # and start using our own, we can drop the cast to str().
        candidates_and_versions = [(c, str(c.version)) for c in candidates]
        versions = set(
            specifier.filter(
                (v for _, v in candidates_and_versions),
                prereleases=allow_prereleases,
            )
        )

        applicable_candidates = [c for c, v in candidates_and_versions if v in versions]
        filtered_applicable_candidates = filter_unallowed_hashes(
            candidates=applicable_candidates,
            hashes=self._hashes,
            project_name=self._project_name,
        )

        return sorted(filtered_applicable_candidates, key=self._sort_key)

    def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey:
        """
        Function to pass as the `key` argument to a call to sorted() to sort
        InstallationCandidates by preference.

        Returns a tuple such that tuples sorting as greater using Python's
        default comparison operator are more preferred.

        The preference is as follows:

        First and foremost, candidates with allowed (matching) hashes are
        always preferred over candidates without matching hashes. This is
        because e.g. if the only candidate with an allowed hash is yanked,
        we still want to use that candidate.

        Second, excepting hash considerations, candidates that have been
        yanked (in the sense of PEP 592) are always less preferred than
        candidates that haven't been yanked. Then:

        If not finding wheels, they are sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self._supported_tags)
          3. source archives
        If prefer_binary was set, then all wheels are sorted above sources.

        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        """
        valid_tags = self._supported_tags
        support_num = len(valid_tags)
        build_tag: BuildTag = ()
        binary_preference = 0
        link = candidate.link
        if link.is_wheel:
            # can raise InvalidWheelFilename
            wheel = Wheel(link.filename)
            try:
                # Negated so a more-preferred (lower index) tag yields a
                # larger (less negative) priority value.
                pri = -(
                    wheel.find_most_preferred_tag(
                        valid_tags, self._wheel_tag_preferences
                    )
                )
            except ValueError:
                raise UnsupportedWheel(
                    f"{wheel.filename} is not a supported wheel for this platform. It "
                    "can't be sorted."
                )
            if self._prefer_binary:
                binary_preference = 1
            if wheel.build_tag is not None:
                # Build tag is "<digits><rest>"; split so numeric part sorts
                # numerically rather than lexicographically.
                match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
                assert match is not None, "guaranteed by filename validation"
                build_tag_groups = match.groups()
                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
        else:  # sdist
            # Sort sdists below every supported wheel tag.
            pri = -(support_num)
        has_allowed_hash = int(link.is_hash_allowed(self._hashes))
        yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
        return (
            has_allowed_hash,
            yank_value,
            binary_preference,
            candidate.version,
            pri,
            build_tag,
        )

    def sort_best_candidate(
        self,
        candidates: List[InstallationCandidate],
    ) -> Optional[InstallationCandidate]:
        """
        Return the best candidate per the instance's sort order, or None if
        no candidate is acceptable.
        """
        if not candidates:
            return None
        best_candidate = max(candidates, key=self._sort_key)
        return best_candidate

    def compute_best_candidate(
        self,
        candidates: List[InstallationCandidate],
    ) -> BestCandidateResult:
        """
        Compute and return a `BestCandidateResult` instance.
        """
        applicable_candidates = self.get_applicable_candidates(candidates)

        best_candidate = self.sort_best_candidate(applicable_candidates)

        return BestCandidateResult(
            candidates,
            applicable_candidates=applicable_candidates,
            best_candidate=best_candidate,
        )
564
+
565
+
566
class PackageFinder:
    """This finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.
    """

    def __init__(
        self,
        link_collector: LinkCollector,
        target_python: TargetPython,
        allow_yanked: bool,
        format_control: Optional[FormatControl] = None,
        candidate_prefs: Optional[CandidatePreferences] = None,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """
        This constructor is primarily meant to be used by the create() class
        method and from tests.

        :param link_collector: Collects links from indexes and find-links
            locations.
        :param target_python: The interpreter to check link compatibility
            against.
        :param allow_yanked: Whether files yanked in the sense of PEP 592
            may be returned as candidates.
        :param format_control: A FormatControl object, used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param candidate_prefs: Options to use when creating a
            CandidateEvaluator object.
        :param ignore_requires_python: Whether to ignore Requires-Python
            metadata when evaluating links.
        """
        if candidate_prefs is None:
            candidate_prefs = CandidatePreferences()

        format_control = format_control or FormatControl(set(), set())

        self._allow_yanked = allow_yanked
        self._candidate_prefs = candidate_prefs
        self._ignore_requires_python = ignore_requires_python
        self._link_collector = link_collector
        self._target_python = target_python

        self.format_control = format_control

        # These are boring links that have already been logged somehow.
        self._logged_links: Set[Tuple[Link, LinkType, str]] = set()

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    @classmethod
    def create(
        cls,
        link_collector: LinkCollector,
        selection_prefs: SelectionPreferences,
        target_python: Optional[TargetPython] = None,
    ) -> "PackageFinder":
        """Create a PackageFinder.

        :param selection_prefs: The candidate selection preferences, as a
            SelectionPreferences object.
        :param target_python: The target Python interpreter to use when
            checking compatibility. If None (the default), a TargetPython
            object will be constructed from the running Python.
        """
        if target_python is None:
            target_python = TargetPython()

        candidate_prefs = CandidatePreferences(
            prefer_binary=selection_prefs.prefer_binary,
            allow_all_prereleases=selection_prefs.allow_all_prereleases,
        )

        return cls(
            candidate_prefs=candidate_prefs,
            link_collector=link_collector,
            target_python=target_python,
            allow_yanked=selection_prefs.allow_yanked,
            format_control=selection_prefs.format_control,
            ignore_requires_python=selection_prefs.ignore_requires_python,
        )

    @property
    def target_python(self) -> TargetPython:
        return self._target_python

    @property
    def search_scope(self) -> SearchScope:
        # Delegated to the link collector, which owns the search scope.
        return self._link_collector.search_scope

    @search_scope.setter
    def search_scope(self, search_scope: SearchScope) -> None:
        self._link_collector.search_scope = search_scope

    @property
    def find_links(self) -> List[str]:
        return self._link_collector.find_links

    @property
    def index_urls(self) -> List[str]:
        return self.search_scope.index_urls

    @property
    def proxy(self) -> Optional[str]:
        return self._link_collector.session.pip_proxy

    @property
    def trusted_hosts(self) -> Iterable[str]:
        # Render each trusted (host, port) origin as a "host:port" netloc.
        for host_port in self._link_collector.session.pip_trusted_origins:
            yield build_netloc(*host_port)

    @property
    def custom_cert(self) -> Optional[str]:
        # session.verify is either a boolean (use default bundle/no SSL
        # verification) or a string path to a custom CA bundle to use. We only
        # care about the latter.
        verify = self._link_collector.session.verify
        return verify if isinstance(verify, str) else None

    @property
    def client_cert(self) -> Optional[str]:
        cert = self._link_collector.session.cert
        assert not isinstance(cert, tuple), "pip only supports PEM client certs"
        return cert

    @property
    def allow_all_prereleases(self) -> bool:
        return self._candidate_prefs.allow_all_prereleases

    def set_allow_all_prereleases(self) -> None:
        # One-way switch: preferences can be enabled but never disabled here.
        self._candidate_prefs.allow_all_prereleases = True

    @property
    def prefer_binary(self) -> bool:
        return self._candidate_prefs.prefer_binary

    def set_prefer_binary(self) -> None:
        # One-way switch, mirroring set_allow_all_prereleases().
        self._candidate_prefs.prefer_binary = True

    def requires_python_skipped_reasons(self) -> List[str]:
        """Return sorted, de-duplicated reasons for links that were skipped
        because of a Requires-Python mismatch (collected while logging)."""
        reasons = {
            detail
            for _, result, detail in self._logged_links
            if result == LinkType.requires_python_mismatch
        }
        return sorted(reasons)

    def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
        """Build a LinkEvaluator configured for the given project, honoring
        this finder's format control, target Python and yank policy."""
        canonical_name = canonicalize_name(project_name)
        formats = self.format_control.get_allowed_formats(canonical_name)

        return LinkEvaluator(
            project_name=project_name,
            canonical_name=canonical_name,
            formats=formats,
            target_python=self._target_python,
            allow_yanked=self._allow_yanked,
            ignore_requires_python=self._ignore_requires_python,
        )

    def _sort_links(self, links: Iterable[Link]) -> List[Link]:
        """
        Returns elements of links in order, non-egg links first, egg links
        second, while eliminating duplicates
        """
        eggs, no_eggs = [], []
        seen: Set[Link] = set()
        for link in links:
            if link not in seen:
                seen.add(link)
                if link.egg_fragment:
                    eggs.append(link)
                else:
                    no_eggs.append(link)
        return no_eggs + eggs

    def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None:
        # This is a hot method so don't waste time hashing links unless we're
        # actually going to log 'em.
        if not logger.isEnabledFor(logging.DEBUG):
            return

        entry = (link, result, detail)
        if entry not in self._logged_links:
            # Put the link at the end so the reason is more visible and because
            # the link string is usually very long.
            logger.debug("Skipping link: %s: %s", detail, link)
            self._logged_links.add(entry)

    def get_install_candidate(
        self, link_evaluator: LinkEvaluator, link: Link
    ) -> Optional[InstallationCandidate]:
        """
        If the link is a candidate for install, convert it to an
        InstallationCandidate and return it. Otherwise, return None.
        """
        result, detail = link_evaluator.evaluate_link(link)
        if result != LinkType.candidate:
            self._log_skipped_link(link, result, detail)
            return None

        try:
            return InstallationCandidate(
                name=link_evaluator.project_name,
                link=link,
                # For a candidate link, `detail` carries the version string.
                version=detail,
            )
        except InvalidVersion:
            # An unparsable version makes the link unusable; drop it silently.
            return None

    def evaluate_links(
        self, link_evaluator: LinkEvaluator, links: Iterable[Link]
    ) -> List[InstallationCandidate]:
        """
        Convert links that are candidates to InstallationCandidate objects.
        """
        candidates = []
        for link in self._sort_links(links):
            candidate = self.get_install_candidate(link_evaluator, link)
            if candidate is not None:
                candidates.append(candidate)

        return candidates

    def process_project_url(
        self, project_url: Link, link_evaluator: LinkEvaluator
    ) -> List[InstallationCandidate]:
        """Fetch one project index page and evaluate the links it lists.

        Returns an empty list when the page could not be fetched.
        """
        logger.debug(
            "Fetching project page and analyzing links: %s",
            project_url,
        )
        index_response = self._link_collector.fetch_response(project_url)
        if index_response is None:
            return []

        page_links = list(parse_links(index_response))

        with indent_log():
            package_links = self.evaluate_links(
                link_evaluator,
                links=page_links,
            )

        return package_links

    @functools.lru_cache(maxsize=None)
    def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
        """Find all available InstallationCandidate for project_name

        This checks index_urls and find_links.
        All versions found are returned as an InstallationCandidate list.

        See LinkEvaluator.evaluate_link() for details on which files
        are accepted.

        NOTE(review): lru_cache on a method keeps a reference to ``self``
        for the life of the process and assumes the finder's configuration
        is stable between calls — presumably intentional here; confirm.
        """
        link_evaluator = self.make_link_evaluator(project_name)

        collected_sources = self._link_collector.collect_sources(
            project_name=project_name,
            candidates_from_page=functools.partial(
                self.process_project_url,
                link_evaluator=link_evaluator,
            ),
        )

        page_candidates_it = itertools.chain.from_iterable(
            source.page_candidates()
            for sources in collected_sources
            for source in sources
            if source is not None
        )
        page_candidates = list(page_candidates_it)

        file_links_it = itertools.chain.from_iterable(
            source.file_links()
            for sources in collected_sources
            for source in sources
            if source is not None
        )
        file_candidates = self.evaluate_links(
            link_evaluator,
            sorted(file_links_it, reverse=True),
        )

        if logger.isEnabledFor(logging.DEBUG) and file_candidates:
            paths = []
            for candidate in file_candidates:
                assert candidate.link.url  # we need to have a URL
                try:
                    paths.append(candidate.link.file_path)
                except Exception:
                    paths.append(candidate.link.url)  # it's not a local file

            logger.debug("Local files found: %s", ", ".join(paths))

        # This is an intentional priority ordering
        return file_candidates + page_candidates

    def make_candidate_evaluator(
        self,
        project_name: str,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> CandidateEvaluator:
        """Create a CandidateEvaluator object to use."""
        candidate_prefs = self._candidate_prefs
        return CandidateEvaluator.create(
            project_name=project_name,
            target_python=self._target_python,
            prefer_binary=candidate_prefs.prefer_binary,
            allow_all_prereleases=candidate_prefs.allow_all_prereleases,
            specifier=specifier,
            hashes=hashes,
        )

    @functools.lru_cache(maxsize=None)
    def find_best_candidate(
        self,
        project_name: str,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> BestCandidateResult:
        """Find matches for the given project and specifier.

        :param specifier: An optional object implementing `filter`
            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
            versions.

        :return: A `BestCandidateResult` instance.
        """
        candidates = self.find_all_candidates(project_name)
        candidate_evaluator = self.make_candidate_evaluator(
            project_name=project_name,
            specifier=specifier,
            hashes=hashes,
        )
        return candidate_evaluator.compute_best_candidate(candidates)

    def find_requirement(
        self, req: InstallRequirement, upgrade: bool
    ) -> Optional[InstallationCandidate]:
        """Try to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a InstallationCandidate if found,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        """
        # Only hashes pinned in the requirement itself are trusted here.
        hashes = req.hashes(trust_internet=False)
        best_candidate_result = self.find_best_candidate(
            req.name,
            specifier=req.specifier,
            hashes=hashes,
        )
        best_candidate = best_candidate_result.best_candidate

        installed_version: Optional[_BaseVersion] = None
        if req.satisfied_by is not None:
            installed_version = req.satisfied_by.version

        def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
            # This repeated parse_version and str() conversion is needed to
            # handle different vendoring sources from pip and pkg_resources.
            # If we stop using the pkg_resources provided specifier and start
            # using our own, we can drop the cast to str().
            return (
                ", ".join(
                    sorted(
                        {str(c.version) for c in cand_iter},
                        key=parse_version,
                    )
                )
                or "none"
            )

        if installed_version is None and best_candidate is None:
            logger.critical(
                "Could not find a version that satisfies the requirement %s "
                "(from versions: %s)",
                req,
                _format_versions(best_candidate_result.all_candidates),
            )

            raise DistributionNotFound(f"No matching distribution found for {req}")

        def _should_install_candidate(
            candidate: Optional[InstallationCandidate],
        ) -> "TypeGuard[InstallationCandidate]":
            # TypeGuard lets callers treat `best_candidate` as non-None in
            # the True branch below.
            if installed_version is None:
                return True
            if best_candidate is None:
                return False
            return best_candidate.version > installed_version

        if not upgrade and installed_version is not None:
            if _should_install_candidate(best_candidate):
                logger.debug(
                    "Existing installed version (%s) satisfies requirement "
                    "(most up-to-date version is %s)",
                    installed_version,
                    best_candidate.version,
                )
            else:
                logger.debug(
                    "Existing installed version (%s) is most up-to-date and "
                    "satisfies requirement",
                    installed_version,
                )
            # Without --upgrade, an existing install always wins.
            return None

        if _should_install_candidate(best_candidate):
            logger.debug(
                "Using version %s (newest of versions: %s)",
                best_candidate.version,
                _format_versions(best_candidate_result.applicable_candidates),
            )
            return best_candidate

        # We have an existing version, and its the best version
        logger.debug(
            "Installed version (%s) is most up-to-date (past versions: %s)",
            installed_version,
            _format_versions(best_candidate_result.applicable_candidates),
        )
        raise BestVersionAlreadyInstalled
986
+
987
+
988
def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
    """Find the separator's index based on the package's canonical name.

    :param fragment: A <package>+<version> filename "fragment" (stem) or
        egg fragment.
    :param canonical_name: The package's canonical name.

    This function is needed since the canonicalized name does not necessarily
    have the same length as the egg info's name part. An example::

        >>> fragment = 'foo__bar-1.0'
        >>> canonical_name = 'foo-bar'
        >>> _find_name_version_sep(fragment, canonical_name)
        8
    """
    # The name and version are separated by exactly one dash. Try every
    # dash in turn: the right one is the first whose prefix canonicalizes
    # to the expected project name.
    for index, char in enumerate(fragment):
        if char == "-" and canonicalize_name(fragment[:index]) == canonical_name:
            return index
    raise ValueError(f"{fragment} does not match {canonical_name}")
1012
+
1013
+
1014
def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]:
    """Parse the version string from a <package>+<version> filename
    "fragment" (stem) or egg fragment.

    Returns None when the fragment does not match the project name or
    carries no version text after the separator.

    :param fragment: The string to parse. E.g. foo-2.1
    :param canonical_name: The canonicalized name of the package this
        belongs to.
    """
    try:
        sep_index = _find_name_version_sep(fragment, canonical_name)
    except ValueError:
        return None
    # Everything past the separating dash is the version; an empty
    # remainder means there was no version at all.
    version = fragment[sep_index + 1 :]
    return version or None
llava/lib/python3.10/site-packages/pip/_internal/index/sources.py ADDED
@@ -0,0 +1,284 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import mimetypes
3
+ import os
4
+ from collections import defaultdict
5
+ from typing import Callable, Dict, Iterable, List, Optional, Tuple
6
+
7
+ from pip._vendor.packaging.utils import (
8
+ InvalidSdistFilename,
9
+ InvalidWheelFilename,
10
+ canonicalize_name,
11
+ parse_sdist_filename,
12
+ parse_wheel_filename,
13
+ )
14
+
15
+ from pip._internal.models.candidate import InstallationCandidate
16
+ from pip._internal.models.link import Link
17
+ from pip._internal.utils.urls import path_to_url, url_to_path
18
+ from pip._internal.vcs import is_url
19
+
20
+ logger = logging.getLogger(__name__)
21
+
22
+ FoundCandidates = Iterable[InstallationCandidate]
23
+ FoundLinks = Iterable[Link]
24
+ CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
25
+ PageValidator = Callable[[Link], bool]
26
+
27
+
28
class LinkSource:
    """Abstract interface for one place candidate links can come from
    (implemented below for flat directories, single files, and remote
    index pages)."""

    @property
    def link(self) -> Optional[Link]:
        """Returns the underlying link, if there's one."""
        raise NotImplementedError()

    def page_candidates(self) -> FoundCandidates:
        """Candidates found by parsing an archive listing HTML file."""
        raise NotImplementedError()

    def file_links(self) -> FoundLinks:
        """Links found by specifying archives directly."""
        raise NotImplementedError()
41
+
42
+
43
+ def _is_html_file(file_url: str) -> bool:
44
+ return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"
45
+
46
+
47
class _FlatDirectoryToUrls:
    """Scans a directory a single time and caches what was found there."""

    def __init__(self, path: str) -> None:
        self._path = path
        self._page_candidates: List[str] = []
        self._project_name_to_urls: Dict[str, List[str]] = defaultdict(list)
        self._scanned_directory = False

    def _scan_directory(self) -> None:
        """Walk the directory once, filling both caches in the same pass."""
        for entry in os.scandir(self._path):
            entry_url = path_to_url(entry.path)

            # HTML files are treated as index pages; anything else might
            # be a package archive.
            if _is_html_file(entry_url):
                self._page_candidates.append(entry_url)
                continue

            # Only names that parse as a wheel or an sdist are worth
            # keeping as package files.
            try:
                name = parse_wheel_filename(entry.name)[0]
            except InvalidWheelFilename:
                try:
                    name = parse_sdist_filename(entry.name)[0]
                except InvalidSdistFilename:
                    continue

            self._project_name_to_urls[name].append(entry_url)
        self._scanned_directory = True

    @property
    def page_candidates(self) -> List[str]:
        # Lazily scan on first access.
        if not self._scanned_directory:
            self._scan_directory()

        return self._page_candidates

    @property
    def project_name_to_urls(self) -> Dict[str, List[str]]:
        # Lazily scan on first access.
        if not self._scanned_directory:
            self._scan_directory()

        return self._project_name_to_urls
92
+
93
+
94
class _FlatDirectorySource(LinkSource):
    """Link source specified by ``--find-links=<path-to-dir>``.

    This looks the content of the directory, and returns:

    * ``page_candidates``: Links listed on each HTML file in the directory.
    * ``file_candidates``: Archives in the directory.
    """

    # Shared per-path cache so a directory given several times is only
    # scanned once per process.
    _paths_to_urls: Dict[str, _FlatDirectoryToUrls] = {}

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        path: str,
        project_name: str,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._project_name = canonicalize_name(project_name)

        # Reuse the cached scanner for this path when one already exists.
        if path not in self._paths_to_urls:
            self._paths_to_urls[path] = _FlatDirectoryToUrls(path=path)
        self._path_to_urls = self._paths_to_urls[path]

    @property
    def link(self) -> Optional[Link]:
        # A directory has no single underlying link.
        return None

    def page_candidates(self) -> FoundCandidates:
        for page_url in self._path_to_urls.page_candidates:
            yield from self._candidates_from_page(Link(page_url))

    def file_links(self) -> FoundLinks:
        for file_url in self._path_to_urls.project_name_to_urls[self._project_name]:
            yield Link(file_url)
132
+
133
+
134
class _LocalFileSource(LinkSource):
    """``--find-links=<path-or-url>`` or ``--[extra-]index-url=<path-or-url>``.

    If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
    the option, it is converted to a URL first. This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Only an HTML file can be parsed as an index page.
        if _is_html_file(self._link.url):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # A non-HTML file is offered directly as an archive link.
        if not _is_html_file(self._link.url):
            yield self._link
165
+
166
+
167
class _RemoteFileSource(LinkSource):
    """``--find-links=<url>`` or ``--[extra-]index-url=<url>``.

    This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        page_validator: PageValidator,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._page_validator = page_validator
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Only pages the validator accepts are parsed for candidates.
        if self._page_validator(self._link):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # The URL itself is always offered as a direct file link.
        yield self._link
197
+
198
+
199
class _IndexDirectorySource(LinkSource):
    """``--[extra-]index-url=<path-to-directory>``.

    This is treated like a remote URL; ``candidates_from_page`` contains logic
    for this by appending ``index.html`` to the link.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # An index directory never contributes direct archive links.
        return ()
223
+
224
+
225
def build_source(
    location: str,
    *,
    candidates_from_page: CandidatesFromPage,
    page_validator: PageValidator,
    expand_dir: bool,
    cache_link_parsing: bool,
    project_name: str,
) -> Tuple[Optional[str], Optional[LinkSource]]:
    """Resolve a ``--find-links`` / ``--index-url`` location to a LinkSource.

    Returns ``(url, source)``; either element is None when the location is
    unusable (a warning is logged in that case).
    """
    # Work out a (url, path) pair for the location. `path` stays None for
    # purely remote locations.
    path: Optional[str] = None
    url: Optional[str] = None
    if os.path.exists(location):  # Is a local path.
        url = path_to_url(location)
        path = location
    elif location.startswith("file:"):  # A file: URL.
        url = location
        path = url_to_path(location)
    elif is_url(location):
        url = location

    if url is None:
        logger.warning(
            "Location '%s' is ignored: "
            "it is either a non-existing path or lacks a specific scheme.",
            location,
        )
        return (None, None)

    if path is None:
        # No local path: treat as a remote page or file.
        remote: LinkSource = _RemoteFileSource(
            candidates_from_page=candidates_from_page,
            page_validator=page_validator,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, remote)

    if os.path.isdir(path):
        source: LinkSource
        if expand_dir:
            # find-links style: scan the directory contents.
            source = _FlatDirectorySource(
                candidates_from_page=candidates_from_page,
                path=path,
                project_name=project_name,
            )
        else:
            # index-url style: defer to the page parser (index.html).
            source = _IndexDirectorySource(
                candidates_from_page=candidates_from_page,
                link=Link(url, cache_link_parsing=cache_link_parsing),
            )
        return (url, source)

    if os.path.isfile(path):
        local: LinkSource = _LocalFileSource(
            candidates_from_page=candidates_from_page,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, local)

    logger.warning(
        "Location '%s' is ignored: it is neither a file nor a directory.",
        location,
    )
    return (url, None)
llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (227 Bytes). View file
 
llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/auth.cpython-310.pyc ADDED
Binary file (14.5 kB). View file
 
llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/cache.cpython-310.pyc ADDED
Binary file (4.78 kB). View file
 
llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/download.cpython-310.pyc ADDED
Binary file (5.47 kB). View file
 
llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-310.pyc ADDED
Binary file (8.4 kB). View file
 
llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/session.cpython-310.pyc ADDED
Binary file (12.6 kB). View file
 
llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/utils.cpython-310.pyc ADDED
Binary file (1.43 kB). View file
 
llava/lib/python3.10/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-310.pyc ADDED
Binary file (2.08 kB). View file
 
llava/lib/python3.10/site-packages/pip/_internal/network/auth.py ADDED
@@ -0,0 +1,566 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Network Authentication Helpers
2
+
3
+ Contains interface (MultiDomainBasicAuth) and associated glue code for
4
+ providing credentials in the context of network requests.
5
+ """
6
+
7
+ import logging
8
+ import os
9
+ import shutil
10
+ import subprocess
11
+ import sysconfig
12
+ import typing
13
+ import urllib.parse
14
+ from abc import ABC, abstractmethod
15
+ from functools import lru_cache
16
+ from os.path import commonprefix
17
+ from pathlib import Path
18
+ from typing import Any, Dict, List, NamedTuple, Optional, Tuple
19
+
20
+ from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
21
+ from pip._vendor.requests.models import Request, Response
22
+ from pip._vendor.requests.utils import get_netrc_auth
23
+
24
+ from pip._internal.utils.logging import getLogger
25
+ from pip._internal.utils.misc import (
26
+ ask,
27
+ ask_input,
28
+ ask_password,
29
+ remove_auth_from_url,
30
+ split_auth_netloc_from_url,
31
+ )
32
+ from pip._internal.vcs.versioncontrol import AuthInfo
33
+
34
+ logger = getLogger(__name__)
35
+
36
+ KEYRING_DISABLED = False
37
+
38
+
39
+ class Credentials(NamedTuple):
40
+ url: str
41
+ username: str
42
+ password: str
43
+
44
+
45
+ class KeyRingBaseProvider(ABC):
46
+ """Keyring base provider interface"""
47
+
48
+ has_keyring: bool
49
+
50
+ @abstractmethod
51
+ def get_auth_info(
52
+ self, url: str, username: Optional[str]
53
+ ) -> Optional[AuthInfo]: ...
54
+
55
+ @abstractmethod
56
+ def save_auth_info(self, url: str, username: str, password: str) -> None: ...
57
+
58
+
59
+ class KeyRingNullProvider(KeyRingBaseProvider):
60
+ """Keyring null provider"""
61
+
62
+ has_keyring = False
63
+
64
+ def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
65
+ return None
66
+
67
+ def save_auth_info(self, url: str, username: str, password: str) -> None:
68
+ return None
69
+
70
+
71
+ class KeyRingPythonProvider(KeyRingBaseProvider):
72
+ """Keyring interface which uses locally imported `keyring`"""
73
+
74
+ has_keyring = True
75
+
76
+ def __init__(self) -> None:
77
+ import keyring
78
+
79
+ self.keyring = keyring
80
+
81
+ def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
82
+ # Support keyring's get_credential interface which supports getting
83
+ # credentials without a username. This is only available for
84
+ # keyring>=15.2.0.
85
+ if hasattr(self.keyring, "get_credential"):
86
+ logger.debug("Getting credentials from keyring for %s", url)
87
+ cred = self.keyring.get_credential(url, username)
88
+ if cred is not None:
89
+ return cred.username, cred.password
90
+ return None
91
+
92
+ if username is not None:
93
+ logger.debug("Getting password from keyring for %s", url)
94
+ password = self.keyring.get_password(url, username)
95
+ if password:
96
+ return username, password
97
+ return None
98
+
99
+ def save_auth_info(self, url: str, username: str, password: str) -> None:
100
+ self.keyring.set_password(url, username, password)
101
+
102
+
103
+ class KeyRingCliProvider(KeyRingBaseProvider):
104
+ """Provider which uses `keyring` cli
105
+
106
+ Instead of calling the keyring package installed alongside pip
107
+ we call keyring on the command line which will enable pip to
108
+ use which ever installation of keyring is available first in
109
+ PATH.
110
+ """
111
+
112
+ has_keyring = True
113
+
114
+ def __init__(self, cmd: str) -> None:
115
+ self.keyring = cmd
116
+
117
+ def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
118
+ # This is the default implementation of keyring.get_credential
119
+ # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139
120
+ if username is not None:
121
+ password = self._get_password(url, username)
122
+ if password is not None:
123
+ return username, password
124
+ return None
125
+
126
+ def save_auth_info(self, url: str, username: str, password: str) -> None:
127
+ return self._set_password(url, username, password)
128
+
129
+ def _get_password(self, service_name: str, username: str) -> Optional[str]:
130
+ """Mirror the implementation of keyring.get_password using cli"""
131
+ if self.keyring is None:
132
+ return None
133
+
134
+ cmd = [self.keyring, "get", service_name, username]
135
+ env = os.environ.copy()
136
+ env["PYTHONIOENCODING"] = "utf-8"
137
+ res = subprocess.run(
138
+ cmd,
139
+ stdin=subprocess.DEVNULL,
140
+ stdout=subprocess.PIPE,
141
+ env=env,
142
+ )
143
+ if res.returncode:
144
+ return None
145
+ return res.stdout.decode("utf-8").strip(os.linesep)
146
+
147
+ def _set_password(self, service_name: str, username: str, password: str) -> None:
148
+ """Mirror the implementation of keyring.set_password using cli"""
149
+ if self.keyring is None:
150
+ return None
151
+ env = os.environ.copy()
152
+ env["PYTHONIOENCODING"] = "utf-8"
153
+ subprocess.run(
154
+ [self.keyring, "set", service_name, username],
155
+ input=f"{password}{os.linesep}".encode(),
156
+ env=env,
157
+ check=True,
158
+ )
159
+ return None
160
+
161
+
162
+ @lru_cache(maxsize=None)
163
+ def get_keyring_provider(provider: str) -> KeyRingBaseProvider:
164
+ logger.verbose("Keyring provider requested: %s", provider)
165
+
166
+ # keyring has previously failed and been disabled
167
+ if KEYRING_DISABLED:
168
+ provider = "disabled"
169
+ if provider in ["import", "auto"]:
170
+ try:
171
+ impl = KeyRingPythonProvider()
172
+ logger.verbose("Keyring provider set: import")
173
+ return impl
174
+ except ImportError:
175
+ pass
176
+ except Exception as exc:
177
+ # In the event of an unexpected exception
178
+ # we should warn the user
179
+ msg = "Installed copy of keyring fails with exception %s"
180
+ if provider == "auto":
181
+ msg = msg + ", trying to find a keyring executable as a fallback"
182
+ logger.warning(msg, exc, exc_info=logger.isEnabledFor(logging.DEBUG))
183
+ if provider in ["subprocess", "auto"]:
184
+ cli = shutil.which("keyring")
185
+ if cli and cli.startswith(sysconfig.get_path("scripts")):
186
+ # all code within this function is stolen from shutil.which implementation
187
+ @typing.no_type_check
188
+ def PATH_as_shutil_which_determines_it() -> str:
189
+ path = os.environ.get("PATH", None)
190
+ if path is None:
191
+ try:
192
+ path = os.confstr("CS_PATH")
193
+ except (AttributeError, ValueError):
194
+ # os.confstr() or CS_PATH is not available
195
+ path = os.defpath
196
+ # bpo-35755: Don't use os.defpath if the PATH environment variable is
197
+ # set to an empty string
198
+
199
+ return path
200
+
201
+ scripts = Path(sysconfig.get_path("scripts"))
202
+
203
+ paths = []
204
+ for path in PATH_as_shutil_which_determines_it().split(os.pathsep):
205
+ p = Path(path)
206
+ try:
207
+ if not p.samefile(scripts):
208
+ paths.append(path)
209
+ except FileNotFoundError:
210
+ pass
211
+
212
+ path = os.pathsep.join(paths)
213
+
214
+ cli = shutil.which("keyring", path=path)
215
+
216
+ if cli:
217
+ logger.verbose("Keyring provider set: subprocess with executable %s", cli)
218
+ return KeyRingCliProvider(cli)
219
+
220
+ logger.verbose("Keyring provider set: disabled")
221
+ return KeyRingNullProvider()
222
+
223
+
224
+ class MultiDomainBasicAuth(AuthBase):
225
+ def __init__(
226
+ self,
227
+ prompting: bool = True,
228
+ index_urls: Optional[List[str]] = None,
229
+ keyring_provider: str = "auto",
230
+ ) -> None:
231
+ self.prompting = prompting
232
+ self.index_urls = index_urls
233
+ self.keyring_provider = keyring_provider # type: ignore[assignment]
234
+ self.passwords: Dict[str, AuthInfo] = {}
235
+ # When the user is prompted to enter credentials and keyring is
236
+ # available, we will offer to save them. If the user accepts,
237
+ # this value is set to the credentials they entered. After the
238
+ # request authenticates, the caller should call
239
+ # ``save_credentials`` to save these.
240
+ self._credentials_to_save: Optional[Credentials] = None
241
+
242
+ @property
243
+ def keyring_provider(self) -> KeyRingBaseProvider:
244
+ return get_keyring_provider(self._keyring_provider)
245
+
246
+ @keyring_provider.setter
247
+ def keyring_provider(self, provider: str) -> None:
248
+ # The free function get_keyring_provider has been decorated with
249
+ # functools.cache. If an exception occurs in get_keyring_auth that
250
+ # cache will be cleared and keyring disabled, take that into account
251
+ # if you want to remove this indirection.
252
+ self._keyring_provider = provider
253
+
254
+ @property
255
+ def use_keyring(self) -> bool:
256
+ # We won't use keyring when --no-input is passed unless
257
+ # a specific provider is requested because it might require
258
+ # user interaction
259
+ return self.prompting or self._keyring_provider not in ["auto", "disabled"]
260
+
261
+ def _get_keyring_auth(
262
+ self,
263
+ url: Optional[str],
264
+ username: Optional[str],
265
+ ) -> Optional[AuthInfo]:
266
+ """Return the tuple auth for a given url from keyring."""
267
+ # Do nothing if no url was provided
268
+ if not url:
269
+ return None
270
+
271
+ try:
272
+ return self.keyring_provider.get_auth_info(url, username)
273
+ except Exception as exc:
274
+ # Log the full exception (with stacktrace) at debug, so it'll only
275
+ # show up when running in verbose mode.
276
+ logger.debug("Keyring is skipped due to an exception", exc_info=True)
277
+ # Always log a shortened version of the exception.
278
+ logger.warning(
279
+ "Keyring is skipped due to an exception: %s",
280
+ str(exc),
281
+ )
282
+ global KEYRING_DISABLED
283
+ KEYRING_DISABLED = True
284
+ get_keyring_provider.cache_clear()
285
+ return None
286
+
287
+ def _get_index_url(self, url: str) -> Optional[str]:
288
+ """Return the original index URL matching the requested URL.
289
+
290
+ Cached or dynamically generated credentials may work against
291
+ the original index URL rather than just the netloc.
292
+
293
+ The provided url should have had its username and password
294
+ removed already. If the original index url had credentials then
295
+ they will be included in the return value.
296
+
297
+ Returns None if no matching index was found, or if --no-index
298
+ was specified by the user.
299
+ """
300
+ if not url or not self.index_urls:
301
+ return None
302
+
303
+ url = remove_auth_from_url(url).rstrip("/") + "/"
304
+ parsed_url = urllib.parse.urlsplit(url)
305
+
306
+ candidates = []
307
+
308
+ for index in self.index_urls:
309
+ index = index.rstrip("/") + "/"
310
+ parsed_index = urllib.parse.urlsplit(remove_auth_from_url(index))
311
+ if parsed_url == parsed_index:
312
+ return index
313
+
314
+ if parsed_url.netloc != parsed_index.netloc:
315
+ continue
316
+
317
+ candidate = urllib.parse.urlsplit(index)
318
+ candidates.append(candidate)
319
+
320
+ if not candidates:
321
+ return None
322
+
323
+ candidates.sort(
324
+ reverse=True,
325
+ key=lambda candidate: commonprefix(
326
+ [
327
+ parsed_url.path,
328
+ candidate.path,
329
+ ]
330
+ ).rfind("/"),
331
+ )
332
+
333
+ return urllib.parse.urlunsplit(candidates[0])
334
+
335
+ def _get_new_credentials(
336
+ self,
337
+ original_url: str,
338
+ *,
339
+ allow_netrc: bool = True,
340
+ allow_keyring: bool = False,
341
+ ) -> AuthInfo:
342
+ """Find and return credentials for the specified URL."""
343
+ # Split the credentials and netloc from the url.
344
+ url, netloc, url_user_password = split_auth_netloc_from_url(
345
+ original_url,
346
+ )
347
+
348
+ # Start with the credentials embedded in the url
349
+ username, password = url_user_password
350
+ if username is not None and password is not None:
351
+ logger.debug("Found credentials in url for %s", netloc)
352
+ return url_user_password
353
+
354
+ # Find a matching index url for this request
355
+ index_url = self._get_index_url(url)
356
+ if index_url:
357
+ # Split the credentials from the url.
358
+ index_info = split_auth_netloc_from_url(index_url)
359
+ if index_info:
360
+ index_url, _, index_url_user_password = index_info
361
+ logger.debug("Found index url %s", index_url)
362
+
363
+ # If an index URL was found, try its embedded credentials
364
+ if index_url and index_url_user_password[0] is not None:
365
+ username, password = index_url_user_password
366
+ if username is not None and password is not None:
367
+ logger.debug("Found credentials in index url for %s", netloc)
368
+ return index_url_user_password
369
+
370
+ # Get creds from netrc if we still don't have them
371
+ if allow_netrc:
372
+ netrc_auth = get_netrc_auth(original_url)
373
+ if netrc_auth:
374
+ logger.debug("Found credentials in netrc for %s", netloc)
375
+ return netrc_auth
376
+
377
+ # If we don't have a password and keyring is available, use it.
378
+ if allow_keyring:
379
+ # The index url is more specific than the netloc, so try it first
380
+ # fmt: off
381
+ kr_auth = (
382
+ self._get_keyring_auth(index_url, username) or
383
+ self._get_keyring_auth(netloc, username)
384
+ )
385
+ # fmt: on
386
+ if kr_auth:
387
+ logger.debug("Found credentials in keyring for %s", netloc)
388
+ return kr_auth
389
+
390
+ return username, password
391
+
392
+ def _get_url_and_credentials(
393
+ self, original_url: str
394
+ ) -> Tuple[str, Optional[str], Optional[str]]:
395
+ """Return the credentials to use for the provided URL.
396
+
397
+ If allowed, netrc and keyring may be used to obtain the
398
+ correct credentials.
399
+
400
+ Returns (url_without_credentials, username, password). Note
401
+ that even if the original URL contains credentials, this
402
+ function may return a different username and password.
403
+ """
404
+ url, netloc, _ = split_auth_netloc_from_url(original_url)
405
+
406
+ # Try to get credentials from original url
407
+ username, password = self._get_new_credentials(original_url)
408
+
409
+ # If credentials not found, use any stored credentials for this netloc.
410
+ # Do this if either the username or the password is missing.
411
+ # This accounts for the situation in which the user has specified
412
+ # the username in the index url, but the password comes from keyring.
413
+ if (username is None or password is None) and netloc in self.passwords:
414
+ un, pw = self.passwords[netloc]
415
+ # It is possible that the cached credentials are for a different username,
416
+ # in which case the cache should be ignored.
417
+ if username is None or username == un:
418
+ username, password = un, pw
419
+
420
+ if username is not None or password is not None:
421
+ # Convert the username and password if they're None, so that
422
+ # this netloc will show up as "cached" in the conditional above.
423
+ # Further, HTTPBasicAuth doesn't accept None, so it makes sense to
424
+ # cache the value that is going to be used.
425
+ username = username or ""
426
+ password = password or ""
427
+
428
+ # Store any acquired credentials.
429
+ self.passwords[netloc] = (username, password)
430
+
431
+ assert (
432
+ # Credentials were found
433
+ (username is not None and password is not None)
434
+ # Credentials were not found
435
+ or (username is None and password is None)
436
+ ), f"Could not load credentials from url: {original_url}"
437
+
438
+ return url, username, password
439
+
440
+ def __call__(self, req: Request) -> Request:
441
+ # Get credentials for this request
442
+ url, username, password = self._get_url_and_credentials(req.url)
443
+
444
+ # Set the url of the request to the url without any credentials
445
+ req.url = url
446
+
447
+ if username is not None and password is not None:
448
+ # Send the basic auth with this request
449
+ req = HTTPBasicAuth(username, password)(req)
450
+
451
+ # Attach a hook to handle 401 responses
452
+ req.register_hook("response", self.handle_401)
453
+
454
+ return req
455
+
456
+ # Factored out to allow for easy patching in tests
457
+ def _prompt_for_password(
458
+ self, netloc: str
459
+ ) -> Tuple[Optional[str], Optional[str], bool]:
460
+ username = ask_input(f"User for {netloc}: ") if self.prompting else None
461
+ if not username:
462
+ return None, None, False
463
+ if self.use_keyring:
464
+ auth = self._get_keyring_auth(netloc, username)
465
+ if auth and auth[0] is not None and auth[1] is not None:
466
+ return auth[0], auth[1], False
467
+ password = ask_password("Password: ")
468
+ return username, password, True
469
+
470
+ # Factored out to allow for easy patching in tests
471
+ def _should_save_password_to_keyring(self) -> bool:
472
+ if (
473
+ not self.prompting
474
+ or not self.use_keyring
475
+ or not self.keyring_provider.has_keyring
476
+ ):
477
+ return False
478
+ return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
479
+
480
+ def handle_401(self, resp: Response, **kwargs: Any) -> Response:
481
+ # We only care about 401 responses, anything else we want to just
482
+ # pass through the actual response
483
+ if resp.status_code != 401:
484
+ return resp
485
+
486
+ username, password = None, None
487
+
488
+ # Query the keyring for credentials:
489
+ if self.use_keyring:
490
+ username, password = self._get_new_credentials(
491
+ resp.url,
492
+ allow_netrc=False,
493
+ allow_keyring=True,
494
+ )
495
+
496
+ # We are not able to prompt the user so simply return the response
497
+ if not self.prompting and not username and not password:
498
+ return resp
499
+
500
+ parsed = urllib.parse.urlparse(resp.url)
501
+
502
+ # Prompt the user for a new username and password
503
+ save = False
504
+ if not username and not password:
505
+ username, password, save = self._prompt_for_password(parsed.netloc)
506
+
507
+ # Store the new username and password to use for future requests
508
+ self._credentials_to_save = None
509
+ if username is not None and password is not None:
510
+ self.passwords[parsed.netloc] = (username, password)
511
+
512
+ # Prompt to save the password to keyring
513
+ if save and self._should_save_password_to_keyring():
514
+ self._credentials_to_save = Credentials(
515
+ url=parsed.netloc,
516
+ username=username,
517
+ password=password,
518
+ )
519
+
520
+ # Consume content and release the original connection to allow our new
521
+ # request to reuse the same one.
522
+ # The result of the assignment isn't used, it's just needed to consume
523
+ # the content.
524
+ _ = resp.content
525
+ resp.raw.release_conn()
526
+
527
+ # Add our new username and password to the request
528
+ req = HTTPBasicAuth(username or "", password or "")(resp.request)
529
+ req.register_hook("response", self.warn_on_401)
530
+
531
+ # On successful request, save the credentials that were used to
532
+ # keyring. (Note that if the user responded "no" above, this member
533
+ # is not set and nothing will be saved.)
534
+ if self._credentials_to_save:
535
+ req.register_hook("response", self.save_credentials)
536
+
537
+ # Send our new request
538
+ new_resp = resp.connection.send(req, **kwargs)
539
+ new_resp.history.append(resp)
540
+
541
+ return new_resp
542
+
543
+ def warn_on_401(self, resp: Response, **kwargs: Any) -> None:
544
+ """Response callback to warn about incorrect credentials."""
545
+ if resp.status_code == 401:
546
+ logger.warning(
547
+ "401 Error, Credentials not correct for %s",
548
+ resp.request.url,
549
+ )
550
+
551
+ def save_credentials(self, resp: Response, **kwargs: Any) -> None:
552
+ """Response callback to save credentials on success."""
553
+ assert (
554
+ self.keyring_provider.has_keyring
555
+ ), "should never reach here without keyring"
556
+
557
+ creds = self._credentials_to_save
558
+ self._credentials_to_save = None
559
+ if creds and resp.status_code < 400:
560
+ try:
561
+ logger.info("Saving credentials to keyring")
562
+ self.keyring_provider.save_auth_info(
563
+ creds.url, creds.username, creds.password
564
+ )
565
+ except Exception:
566
+ logger.exception("Failed to save credentials")
llava/lib/python3.10/site-packages/pip/_internal/network/xmlrpc.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """xmlrpclib.Transport implementation
2
+ """
3
+
4
+ import logging
5
+ import urllib.parse
6
+ import xmlrpc.client
7
+ from typing import TYPE_CHECKING, Tuple
8
+
9
+ from pip._internal.exceptions import NetworkConnectionError
10
+ from pip._internal.network.session import PipSession
11
+ from pip._internal.network.utils import raise_for_status
12
+
13
+ if TYPE_CHECKING:
14
+ from xmlrpc.client import _HostType, _Marshallable
15
+
16
+ from _typeshed import SizedBuffer
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
+ class PipXmlrpcTransport(xmlrpc.client.Transport):
22
+ """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
23
+ object.
24
+ """
25
+
26
+ def __init__(
27
+ self, index_url: str, session: PipSession, use_datetime: bool = False
28
+ ) -> None:
29
+ super().__init__(use_datetime)
30
+ index_parts = urllib.parse.urlparse(index_url)
31
+ self._scheme = index_parts.scheme
32
+ self._session = session
33
+
34
+ def request(
35
+ self,
36
+ host: "_HostType",
37
+ handler: str,
38
+ request_body: "SizedBuffer",
39
+ verbose: bool = False,
40
+ ) -> Tuple["_Marshallable", ...]:
41
+ assert isinstance(host, str)
42
+ parts = (self._scheme, host, handler, None, None, None)
43
+ url = urllib.parse.urlunparse(parts)
44
+ try:
45
+ headers = {"Content-Type": "text/xml"}
46
+ response = self._session.post(
47
+ url,
48
+ data=request_body,
49
+ headers=headers,
50
+ stream=True,
51
+ )
52
+ raise_for_status(response)
53
+ self.verbose = verbose
54
+ return self.parse_response(response.raw)
55
+ except NetworkConnectionError as exc:
56
+ assert exc.response
57
+ logger.critical(
58
+ "HTTP error %s while getting %s",
59
+ exc.response.status_code,
60
+ url,
61
+ )
62
+ raise
minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/async_compat.cpython-310.pyc ADDED
Binary file (1.75 kB). View file
 
minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/conftest_utils.cpython-310.pyc ADDED
Binary file (630 Bytes). View file
 
minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/gcs_aio_client.cpython-310.pyc ADDED
Binary file (7.7 kB). View file
 
minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/gcs_utils.cpython-310.pyc ADDED
Binary file (3.72 kB). View file
 
minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/log.cpython-310.pyc ADDED
Binary file (3.1 kB). View file
 
minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/metrics_agent.cpython-310.pyc ADDED
Binary file (20.6 kB). View file
 
minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/protobuf_compat.cpython-310.pyc ADDED
Binary file (1.64 kB). View file
 
minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/services.cpython-310.pyc ADDED
Binary file (58.4 kB). View file
 
minigpt2/lib/python3.10/site-packages/ray/_private/__pycache__/state_api_test_utils.cpython-310.pyc ADDED
Binary file (14.1 kB). View file
 
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_amp_update_scale.h ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+
3
+ // @generated by torchgen/gen.py from Function.h
4
+
5
+ #include <ATen/Context.h>
6
+ #include <ATen/DeviceGuard.h>
7
+ #include <ATen/TensorUtils.h>
8
+ #include <ATen/TracerMode.h>
9
+ #include <ATen/core/Generator.h>
10
+ #include <ATen/core/Reduction.h>
11
+ #include <ATen/core/Tensor.h>
12
+ #include <c10/core/Scalar.h>
13
+ #include <c10/core/Storage.h>
14
+ #include <c10/core/TensorOptions.h>
15
+ #include <c10/util/Deprecated.h>
16
+ #include <c10/util/Optional.h>
17
+
18
+
19
+
20
+ #include <ATen/ops/_amp_update_scale_ops.h>
21
+
22
+ namespace at {
23
+
24
+
25
+ // aten::_amp_update_scale_(Tensor(a!) self, Tensor(b!) growth_tracker, Tensor found_inf, float scale_growth_factor, float scale_backoff_factor, int growth_interval) -> Tensor(a!)
26
+ inline at::Tensor & _amp_update_scale_(at::Tensor & self, at::Tensor & growth_tracker, const at::Tensor & found_inf, double scale_growth_factor, double scale_backoff_factor, int64_t growth_interval) {
27
+ return at::_ops::_amp_update_scale_::call(self, growth_tracker, found_inf, scale_growth_factor, scale_backoff_factor, growth_interval);
28
+ }
29
+
30
+ // aten::_amp_update_scale.out(Tensor self, Tensor(b!) growth_tracker, Tensor found_inf, float scale_growth_factor, float scale_backoff_factor, int growth_interval, *, Tensor(a!) out) -> Tensor(a!)
31
+ inline at::Tensor & _amp_update_scale_out(at::Tensor & out, const at::Tensor & self, at::Tensor & growth_tracker, const at::Tensor & found_inf, double scale_growth_factor, double scale_backoff_factor, int64_t growth_interval) {
32
+ return at::_ops::_amp_update_scale_out::call(self, growth_tracker, found_inf, scale_growth_factor, scale_backoff_factor, growth_interval, out);
33
+ }
34
+ // aten::_amp_update_scale.out(Tensor self, Tensor(b!) growth_tracker, Tensor found_inf, float scale_growth_factor, float scale_backoff_factor, int growth_interval, *, Tensor(a!) out) -> Tensor(a!)
35
+ inline at::Tensor & _amp_update_scale_outf(const at::Tensor & self, at::Tensor & growth_tracker, const at::Tensor & found_inf, double scale_growth_factor, double scale_backoff_factor, int64_t growth_interval, at::Tensor & out) {
36
+ return at::_ops::_amp_update_scale_out::call(self, growth_tracker, found_inf, scale_growth_factor, scale_backoff_factor, growth_interval, out);
37
+ }
38
+
39
+ // aten::_amp_update_scale(Tensor self, Tensor growth_tracker, Tensor found_inf, float scale_growth_factor, float scale_backoff_factor, int growth_interval) -> (Tensor, Tensor growth_tracker_out)
40
+ inline ::std::tuple<at::Tensor,at::Tensor> _amp_update_scale(const at::Tensor & self, const at::Tensor & growth_tracker, const at::Tensor & found_inf, double scale_growth_factor, double scale_backoff_factor, int64_t growth_interval) {
41
+ return at::_ops::_amp_update_scale::call(self, growth_tracker, found_inf, scale_growth_factor, scale_backoff_factor, growth_interval);
42
+ }
43
+
44
+ }
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_ctc_loss_ops.h ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+
3
+ // @generated by torchgen/gen.py from Operator.h
4
+
5
+ #include <tuple>
6
+ #include <vector>
7
+
8
+ // Forward declarations of any types needed in the operator signatures.
9
+ // We can't directly include these classes because it will cause circular include dependencies.
10
+ // This file is included by TensorBody.h, which defines the Tensor class.
11
+ #include <ATen/core/ATen_fwd.h>
12
+
13
+ namespace at {
14
+ namespace _ops {
15
+
16
+
17
+ struct TORCH_API _ctc_loss {
18
+ using schema = ::std::tuple<at::Tensor,at::Tensor> (const at::Tensor &, const at::Tensor &, at::IntArrayRef, at::IntArrayRef, int64_t, bool);
19
+ using ptr_schema = schema*;
20
+ // See Note [static constexpr char* members for windows NVCC]
21
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_ctc_loss")
22
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
23
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_ctc_loss(Tensor log_probs, Tensor targets, int[] input_lengths, int[] target_lengths, int blank=0, bool zero_infinity=False) -> (Tensor, Tensor)")
24
+ static ::std::tuple<at::Tensor,at::Tensor> call(const at::Tensor & log_probs, const at::Tensor & targets, at::IntArrayRef input_lengths, at::IntArrayRef target_lengths, int64_t blank, bool zero_infinity);
25
+ static ::std::tuple<at::Tensor,at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & log_probs, const at::Tensor & targets, at::IntArrayRef input_lengths, at::IntArrayRef target_lengths, int64_t blank, bool zero_infinity);
26
+ };
27
+
28
+ struct TORCH_API _ctc_loss_Tensor {
29
+ using schema = ::std::tuple<at::Tensor,at::Tensor> (const at::Tensor &, const at::Tensor &, const at::Tensor &, const at::Tensor &, int64_t, bool);
30
+ using ptr_schema = schema*;
31
+ // See Note [static constexpr char* members for windows NVCC]
32
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_ctc_loss")
33
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Tensor")
34
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_ctc_loss.Tensor(Tensor log_probs, Tensor targets, Tensor input_lengths, Tensor target_lengths, int blank=0, bool zero_infinity=False) -> (Tensor, Tensor)")
35
+ static ::std::tuple<at::Tensor,at::Tensor> call(const at::Tensor & log_probs, const at::Tensor & targets, const at::Tensor & input_lengths, const at::Tensor & target_lengths, int64_t blank, bool zero_infinity);
36
+ static ::std::tuple<at::Tensor,at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & log_probs, const at::Tensor & targets, const at::Tensor & input_lengths, const at::Tensor & target_lengths, int64_t blank, bool zero_infinity);
37
+ };
38
+
39
+ struct TORCH_API _ctc_loss_out {
40
+ using schema = ::std::tuple<at::Tensor &,at::Tensor &> (const at::Tensor &, const at::Tensor &, at::IntArrayRef, at::IntArrayRef, int64_t, bool, at::Tensor &, at::Tensor &);
41
+ using ptr_schema = schema*;
42
+ // See Note [static constexpr char* members for windows NVCC]
43
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_ctc_loss")
44
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out")
45
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_ctc_loss.out(Tensor log_probs, Tensor targets, int[] input_lengths, int[] target_lengths, int blank=0, bool zero_infinity=False, *, Tensor(a!) out0, Tensor(b!) out1) -> (Tensor(a!), Tensor(b!))")
46
+ static ::std::tuple<at::Tensor &,at::Tensor &> call(const at::Tensor & log_probs, const at::Tensor & targets, at::IntArrayRef input_lengths, at::IntArrayRef target_lengths, int64_t blank, bool zero_infinity, at::Tensor & out0, at::Tensor & out1);
47
+ static ::std::tuple<at::Tensor &,at::Tensor &> redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & log_probs, const at::Tensor & targets, at::IntArrayRef input_lengths, at::IntArrayRef target_lengths, int64_t blank, bool zero_infinity, at::Tensor & out0, at::Tensor & out1);
48
+ };
49
+
50
+ struct TORCH_API _ctc_loss_Tensor_out {
51
+ using schema = ::std::tuple<at::Tensor &,at::Tensor &> (const at::Tensor &, const at::Tensor &, const at::Tensor &, const at::Tensor &, int64_t, bool, at::Tensor &, at::Tensor &);
52
+ using ptr_schema = schema*;
53
+ // See Note [static constexpr char* members for windows NVCC]
54
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_ctc_loss")
55
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Tensor_out")
56
+ STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_ctc_loss.Tensor_out(Tensor log_probs, Tensor targets, Tensor input_lengths, Tensor target_lengths, int blank=0, bool zero_infinity=False, *, Tensor(a!) out0, Tensor(b!) out1) -> (Tensor(a!), Tensor(b!))")
57
+ static ::std::tuple<at::Tensor &,at::Tensor &> call(const at::Tensor & log_probs, const at::Tensor & targets, const at::Tensor & input_lengths, const at::Tensor & target_lengths, int64_t blank, bool zero_infinity, at::Tensor & out0, at::Tensor & out1);
58
+ static ::std::tuple<at::Tensor &,at::Tensor &> redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & log_probs, const at::Tensor & targets, const at::Tensor & input_lengths, const at::Tensor & target_lengths, int64_t blank, bool zero_infinity, at::Tensor & out0, at::Tensor & out1);
59
+ };
60
+
61
+ }} // namespace at::_ops
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_remove_batch_dim_native.h ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #pragma once
2
+
3
+ // @generated by torchgen/gen.py from NativeFunction.h
4
+
5
+ #include <c10/core/Scalar.h>
6
+ #include <c10/core/Storage.h>
7
+ #include <c10/core/TensorOptions.h>
8
+ #include <c10/util/Deprecated.h>
9
+ #include <c10/util/Optional.h>
10
+ #include <c10/core/QScheme.h>
11
+ #include <ATen/core/Reduction.h>
12
+ #include <ATen/core/Tensor.h>
13
+ #include <tuple>
14
+ #include <vector>
15
+
16
+
17
+ namespace at {
18
+ namespace native {
19
+ TORCH_API at::Tensor _remove_batch_dim(const at::Tensor & self, int64_t level, int64_t batch_size, int64_t out_dim);
20
+ } // namespace native
21
+ } // namespace at