koichi12 commited on
Commit
3e89aee
·
verified ·
1 Parent(s): 19160d9

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
Files changed (50) hide show
  1. .venv/lib/python3.11/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc +0 -0
  2. .venv/lib/python3.11/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc +0 -0
  3. .venv/lib/python3.11/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc +0 -0
  4. .venv/lib/python3.11/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc +0 -0
  5. .venv/lib/python3.11/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc +0 -0
  6. .venv/lib/python3.11/site-packages/pip/_internal/__pycache__/main.cpython-311.pyc +0 -0
  7. .venv/lib/python3.11/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc +0 -0
  8. .venv/lib/python3.11/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc +0 -0
  9. .venv/lib/python3.11/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc +0 -0
  10. .venv/lib/python3.11/site-packages/pip/_internal/cli/__init__.py +4 -0
  11. .venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc +0 -0
  12. .venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc +0 -0
  13. .venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc +0 -0
  14. .venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc +0 -0
  15. .venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc +0 -0
  16. .venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc +0 -0
  17. .venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc +0 -0
  18. .venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc +0 -0
  19. .venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc +0 -0
  20. .venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc +0 -0
  21. .venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc +0 -0
  22. .venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc +0 -0
  23. .venv/lib/python3.11/site-packages/pip/_internal/cli/autocompletion.py +172 -0
  24. .venv/lib/python3.11/site-packages/pip/_internal/cli/base_command.py +236 -0
  25. .venv/lib/python3.11/site-packages/pip/_internal/cli/cmdoptions.py +1074 -0
  26. .venv/lib/python3.11/site-packages/pip/_internal/cli/command_context.py +27 -0
  27. .venv/lib/python3.11/site-packages/pip/_internal/cli/main.py +79 -0
  28. .venv/lib/python3.11/site-packages/pip/_internal/cli/main_parser.py +134 -0
  29. .venv/lib/python3.11/site-packages/pip/_internal/cli/parser.py +294 -0
  30. .venv/lib/python3.11/site-packages/pip/_internal/cli/progress_bars.py +68 -0
  31. .venv/lib/python3.11/site-packages/pip/_internal/cli/req_command.py +505 -0
  32. .venv/lib/python3.11/site-packages/pip/_internal/cli/spinners.py +159 -0
  33. .venv/lib/python3.11/site-packages/pip/_internal/cli/status_codes.py +6 -0
  34. .venv/lib/python3.11/site-packages/pip/_internal/distributions/__init__.py +21 -0
  35. .venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc +0 -0
  36. .venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc +0 -0
  37. .venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc +0 -0
  38. .venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc +0 -0
  39. .venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc +0 -0
  40. .venv/lib/python3.11/site-packages/pip/_internal/distributions/base.py +51 -0
  41. .venv/lib/python3.11/site-packages/pip/_internal/distributions/installed.py +29 -0
  42. .venv/lib/python3.11/site-packages/pip/_internal/distributions/sdist.py +156 -0
  43. .venv/lib/python3.11/site-packages/pip/_internal/distributions/wheel.py +40 -0
  44. .venv/lib/python3.11/site-packages/pip/_internal/models/__init__.py +2 -0
  45. .venv/lib/python3.11/site-packages/pip/_internal/models/candidate.py +30 -0
  46. .venv/lib/python3.11/site-packages/pip/_internal/models/direct_url.py +235 -0
  47. .venv/lib/python3.11/site-packages/pip/_internal/models/format_control.py +78 -0
  48. .venv/lib/python3.11/site-packages/pip/_internal/models/index.py +28 -0
  49. .venv/lib/python3.11/site-packages/pip/_internal/models/installation_report.py +56 -0
  50. .venv/lib/python3.11/site-packages/pip/_internal/models/link.py +579 -0
.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (879 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc ADDED
Binary file (16.1 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc ADDED
Binary file (14.4 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc ADDED
Binary file (19.8 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc ADDED
Binary file (37.4 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/main.cpython-311.pyc ADDED
Binary file (748 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc ADDED
Binary file (5.61 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc ADDED
Binary file (11.8 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc ADDED
Binary file (15.2 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/cli/__init__.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ """Subpackage containing all of pip's command line interface related code
2
+ """
3
+
4
+ # This file intentionally does not import submodules
.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (283 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc ADDED
Binary file (10.2 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc ADDED
Binary file (11.9 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc ADDED
Binary file (33.8 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc ADDED
Binary file (2.11 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc ADDED
Binary file (2.58 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc ADDED
Binary file (5.52 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc ADDED
Binary file (16.9 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc ADDED
Binary file (3.17 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc ADDED
Binary file (20.3 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc ADDED
Binary file (8.83 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc ADDED
Binary file (371 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/cli/autocompletion.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Logic that powers autocompletion installed by ``pip completion``.
2
+ """
3
+
4
+ import optparse
5
+ import os
6
+ import sys
7
+ from itertools import chain
8
+ from typing import Any, Iterable, List, Optional
9
+
10
+ from pip._internal.cli.main_parser import create_main_parser
11
+ from pip._internal.commands import commands_dict, create_command
12
+ from pip._internal.metadata import get_default_environment
13
+
14
+
15
def autocomplete() -> None:
    """Entry Point for completion of main and subcommand options.

    Reads the shell completion state from the ``COMP_WORDS`` and
    ``COMP_CWORD`` environment variables (exported by the script that
    ``pip completion`` installs), prints candidate completions to stdout,
    and exits the process with status 1 once triggered.
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if "PIP_AUTO_COMPLETE" not in os.environ:
        return
    # COMP_WORDS holds the whole command line; drop the leading "pip" word.
    cwords = os.environ["COMP_WORDS"].split()[1:]
    cword = int(os.environ["COMP_CWORD"])
    try:
        current = cwords[cword - 1]
    except IndexError:
        # Cursor sits past the last word (completing after a space).
        current = ""

    parser = create_main_parser()
    subcommands = list(commands_dict)
    options = []

    # subcommand: the first word that names a known pip command, if any
    subcommand_name: Optional[str] = None
    for word in cwords:
        if word in subcommands:
            subcommand_name = word
            break
    # subcommand options
    if subcommand_name is not None:
        # special case: 'help' subcommand has no options
        if subcommand_name == "help":
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = not current.startswith("-") and subcommand_name in [
            "show",
            "uninstall",
        ]
        if should_list_installed:
            env = get_default_environment()
            lc = current.lower()
            installed = [
                dist.canonical_name
                for dist in env.iter_installed_distributions(local_only=True)
                if dist.canonical_name.startswith(lc)
                and dist.canonical_name not in cwords[1:]
            ]
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        # 'pip install' completes positional arguments as filesystem paths
        should_list_installables = (
            not current.startswith("-") and subcommand_name == "install"
        )
        if should_list_installables:
            for path in auto_complete_paths(current, "path"):
                print(path)
            sys.exit(1)

        subcommand = create_command(subcommand_name)

        # Collect (option string, nargs) pairs for every non-hidden option.
        for opt in subcommand.parser.option_list_all:
            if opt.help != optparse.SUPPRESS_HELP:
                options += [
                    (opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts
                ]

        # filter out previously specified options from available options
        prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        # get completion type given cwords and available subcommand options
        completion_type = get_path_completion_type(
            cwords,
            cword,
            subcommand.parser.option_list_all,
        )
        # get completion files and directories if ``completion_type`` is
        # ``<file>``, ``<dir>`` or ``<path>``
        if completion_type:
            paths = auto_complete_paths(current, completion_type)
            options = [(path, 0) for path in paths]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += "="
            print(opt_label)
    else:
        # show main parser options only when necessary

        opts = [i.option_list for i in parser.option_groups]
        opts.append(parser.option_list)
        flattened_opts = chain.from_iterable(opts)
        if current.startswith("-"):
            for opt in flattened_opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts
        else:
            # get completion type given cwords and all available options
            completion_type = get_path_completion_type(cwords, cword, flattened_opts)
            if completion_type:
                subcommands = list(auto_complete_paths(current, completion_type))

        print(" ".join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)
118
+
119
+
120
def get_path_completion_type(
    cwords: List[str], cword: int, opts: Iterable[Any]
) -> Optional[str]:
    """Get the type of path completion (``file``, ``dir``, ``path`` or None)

    :param cwords: same as the environmental variable ``COMP_WORDS``
    :param cword: same as the environmental variable ``COMP_CWORD``
    :param opts: The available options to check
    :return: path completion type (``file``, ``dir``, ``path`` or None)
    """
    # Path completion only makes sense for the value of an option, i.e. when
    # the word before the one being completed is an option string.
    if cword < 2:
        return None
    preceding = cwords[cword - 2]
    if not preceding.startswith("-"):
        return None

    wanted = preceding.split("=")[0]
    path_kinds = ("path", "file", "dir")
    for opt in opts:
        # Hidden options never advertise a completion type.
        if opt.help == optparse.SUPPRESS_HELP:
            continue
        if wanted in str(opt).split("/"):
            metavar = opt.metavar
            if not metavar or any(part in path_kinds for part in metavar.split("/")):
                return metavar
    return None
142
+
143
+
144
def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
    """If ``completion_type`` is ``file`` or ``path``, list all regular files
    and directories starting with ``current``; otherwise only list directories
    starting with ``current``.

    :param current: The word to be completed
    :param completion_type: path completion type(``file``, ``path`` or ``dir``)
    :return: A generator of regular files and/or directories
    """
    directory, filename = os.path.split(current)
    current_path = os.path.abspath(directory)
    # Bail out silently when the directory cannot be read at all.
    if not os.access(current_path, os.R_OK):
        return
    prefix = os.path.normcase(filename)
    for entry in os.listdir(current_path):
        # Only consider entries that extend what the user already typed.
        if not os.path.normcase(entry).startswith(prefix):
            continue
        full_path = os.path.join(current_path, entry)
        candidate = os.path.normcase(os.path.join(directory, entry))
        if completion_type != "dir" and os.path.isfile(full_path):
            # Regular files complete as-is unless only dirs were requested.
            yield candidate
        elif os.path.isdir(full_path):
            # Directories get a trailing separator so completion can descend.
            yield os.path.join(candidate, "")
.venv/lib/python3.11/site-packages/pip/_internal/cli/base_command.py ADDED
@@ -0,0 +1,236 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Base Command class, and related routines"""
2
+
3
+ import functools
4
+ import logging
5
+ import logging.config
6
+ import optparse
7
+ import os
8
+ import sys
9
+ import traceback
10
+ from optparse import Values
11
+ from typing import Any, Callable, List, Optional, Tuple
12
+
13
+ from pip._vendor.rich import traceback as rich_traceback
14
+
15
+ from pip._internal.cli import cmdoptions
16
+ from pip._internal.cli.command_context import CommandContextMixIn
17
+ from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
18
+ from pip._internal.cli.status_codes import (
19
+ ERROR,
20
+ PREVIOUS_BUILD_DIR_ERROR,
21
+ UNKNOWN_ERROR,
22
+ VIRTUALENV_NOT_FOUND,
23
+ )
24
+ from pip._internal.exceptions import (
25
+ BadCommand,
26
+ CommandError,
27
+ DiagnosticPipError,
28
+ InstallationError,
29
+ NetworkConnectionError,
30
+ PreviousBuildDirError,
31
+ UninstallationError,
32
+ )
33
+ from pip._internal.utils.filesystem import check_path_owner
34
+ from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
35
+ from pip._internal.utils.misc import get_prog, normalize_path
36
+ from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
37
+ from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
38
+ from pip._internal.utils.virtualenv import running_under_virtualenv
39
+
40
+ __all__ = ["Command"]
41
+
42
+ logger = logging.getLogger(__name__)
43
+
44
+
45
class Command(CommandContextMixIn):
    # NOTE(review): Command deliberately has no class docstring --
    # ``self.__doc__`` is forwarded below as the parser description, so each
    # subclass supplies its own docstring as the command description.
    usage: str = ""
    # Subclasses set this to True to opt out of the --require-virtualenv check.
    ignore_require_venv: bool = False

    def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
        super().__init__()

        self.name = name
        self.summary = summary
        self.parser = ConfigOptionParser(
            usage=self.usage,
            prog=f"{get_prog()} {name}",
            formatter=UpdatingDefaultsHelpFormatter(),
            add_help_option=False,
            name=name,
            description=self.__doc__,
            isolated=isolated,
        )

        # Populated in _main(); lets commands register temp-dir kinds.
        self.tempdir_registry: Optional[TempDirRegistry] = None

        # Commands should add options to this option group
        optgroup_name = f"{self.name.capitalize()} Options"
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)

        self.add_options()

    def add_options(self) -> None:
        # Hook for subclasses to register their command-specific options.
        pass

    def handle_pip_version_check(self, options: Values) -> None:
        """
        This is a no-op so that commands by default do not do the pip version
        check.
        """
        # Make sure we do the pip version check if the index_group options
        # are present.
        assert not hasattr(options, "no_index")

    def run(self, options: Values, args: List[str]) -> int:
        # Subclasses implement the actual command logic and return an int
        # status code.
        raise NotImplementedError

    def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args: List[str]) -> int:
        """Public entry point: run the command inside its context manager and
        flush logging handlers no matter how it exits."""
        try:
            with self.main_context():
                return self._main(args)
        finally:
            logging.shutdown()

    def _main(self, args: List[str]) -> int:
        # We must initialize this before the tempdir manager, otherwise the
        # configuration would not be accessible by the time we clean up the
        # tempdir manager.
        self.tempdir_registry = self.enter_context(tempdir_registry())
        # Intentionally set as early as possible so globally-managed temporary
        # directories are available to the rest of the code.
        self.enter_context(global_tempdir_manager())

        options, args = self.parse_args(args)

        # Set verbosity so that it can be used elsewhere.
        self.verbosity = options.verbose - options.quiet

        level_number = setup_logging(
            verbosity=self.verbosity,
            no_color=options.no_color,
            user_log_file=options.log,
        )

        # Warn when a user passes --use-feature for a feature that is now on
        # by default and cannot be turned off.
        always_enabled_features = set(options.features_enabled) & set(
            cmdoptions.ALWAYS_ENABLED_FEATURES
        )
        if always_enabled_features:
            logger.warning(
                "The following features are always enabled: %s. ",
                ", ".join(sorted(always_enabled_features)),
            )

        # Make sure that the --python argument isn't specified after the
        # subcommand. We can tell, because if --python was specified,
        # we should only reach this point if we're running in the created
        # subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment
        # variable set.
        if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
            logger.critical(
                "The --python option must be placed before the pip subcommand name"
            )
            sys.exit(ERROR)

        # TODO: Try to get these passing down from the command?
        # without resorting to os.environ to hold these.
        # This also affects isolated builds and it should.

        if options.no_input:
            os.environ["PIP_NO_INPUT"] = "1"

        if options.exists_action:
            os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)

        if options.require_venv and not self.ignore_require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical("Could not find an activated virtualenv (required).")
                sys.exit(VIRTUALENV_NOT_FOUND)

        if options.cache_dir:
            options.cache_dir = normalize_path(options.cache_dir)
            # Disable (rather than fail) caching when the cache directory is
            # not usable by the current user.
            if not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "or is not writable by the current user. The cache "
                    "has been disabled. Check the permissions and owner of "
                    "that directory. If executing pip with sudo, you should "
                    "use sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

        def intercepts_unhandled_exc(
            run_func: Callable[..., int]
        ) -> Callable[..., int]:
            # Wrap run_func so every known exception type is logged and
            # translated into the matching integer status code.
            @functools.wraps(run_func)
            def exc_logging_wrapper(*args: Any) -> int:
                try:
                    status = run_func(*args)
                    assert isinstance(status, int)
                    return status
                except DiagnosticPipError as exc:
                    logger.error("%s", exc, extra={"rich": True})
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except PreviousBuildDirError as exc:
                    logger.critical(str(exc))
                    logger.debug("Exception information:", exc_info=True)

                    return PREVIOUS_BUILD_DIR_ERROR
                except (
                    InstallationError,
                    UninstallationError,
                    BadCommand,
                    NetworkConnectionError,
                ) as exc:
                    logger.critical(str(exc))
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except CommandError as exc:
                    logger.critical("%s", exc)
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except BrokenStdoutLoggingError:
                    # Bypass our logger and write any remaining messages to
                    # stderr because stdout no longer works.
                    print("ERROR: Pipe to stdout was broken", file=sys.stderr)
                    if level_number <= logging.DEBUG:
                        traceback.print_exc(file=sys.stderr)

                    return ERROR
                except KeyboardInterrupt:
                    logger.critical("Operation cancelled by user")
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except BaseException:
                    logger.critical("Exception:", exc_info=True)

                    return UNKNOWN_ERROR

            return exc_logging_wrapper

        try:
            if not options.debug_mode:
                run = intercepts_unhandled_exc(self.run)
            else:
                # In --debug mode exceptions propagate; install rich's
                # traceback handler for readable reports.
                run = self.run
                rich_traceback.install(show_locals=True)
            return run(options, args)
        finally:
            self.handle_pip_version_check(options)
.venv/lib/python3.11/site-packages/pip/_internal/cli/cmdoptions.py ADDED
@@ -0,0 +1,1074 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ shared options and groups
3
+
4
+ The principle here is to define options once, but *not* instantiate them
5
+ globally. One reason being that options with action='append' can carry state
6
+ between parses. pip parses general options twice internally, and shouldn't
7
+ pass on state. To be consistent, all options will follow this design.
8
+ """
9
+
10
+ # The following comment should be removed at some point in the future.
11
+ # mypy: strict-optional=False
12
+
13
+ import importlib.util
14
+ import logging
15
+ import os
16
+ import textwrap
17
+ from functools import partial
18
+ from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
19
+ from textwrap import dedent
20
+ from typing import Any, Callable, Dict, Optional, Tuple
21
+
22
+ from pip._vendor.packaging.utils import canonicalize_name
23
+
24
+ from pip._internal.cli.parser import ConfigOptionParser
25
+ from pip._internal.exceptions import CommandError
26
+ from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
27
+ from pip._internal.models.format_control import FormatControl
28
+ from pip._internal.models.index import PyPI
29
+ from pip._internal.models.target_python import TargetPython
30
+ from pip._internal.utils.hashes import STRONG_HASHES
31
+ from pip._internal.utils.misc import strtobool
32
+
33
+ logger = logging.getLogger(__name__)
34
+
35
+
36
def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
    """
    Raise an option parsing error using parser.error().

    Args:
        parser: an OptionParser instance.
        option: an Option instance.
        msg: the error text.
    """
    # Collapse all runs of whitespace, then re-wrap to the default width
    # before handing the message to optparse (which exits the process).
    normalized = " ".join(f"{option} error: {msg}".split())
    parser.error(textwrap.fill(normalized))
48
+
49
+
50
def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
    """
    Return an OptionGroup object
    group -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    """
    option_group = OptionGroup(parser, group["name"])
    # Each entry is a factory (partial) rather than an Option instance;
    # instantiate fresh Options so no state leaks between parses.
    option_group.add_options([factory() for factory in group["options"]])
    return option_group
60
+
61
+
62
def check_dist_restriction(options: Values, check_target: bool = False) -> None:
    """Function for determining if custom platform options are allowed.

    :param options: The OptionParser options.
    :param check_target: Whether or not to check if --target is being used.
    """
    # Any of these options restricts acceptable dists to a specific
    # platform/interpreter, which may differ from the running one.
    dist_restriction_set = any(
        (
            options.python_version,
            options.platforms,
            options.abis,
            options.implementation,
        )
    )

    # A format control that accepts wheels only, for every package.
    binary_only = FormatControl(set(), {":all:"})
    sdist_dependencies_allowed = (
        options.format_control != binary_only and not options.ignore_dependencies
    )

    # Installations or downloads using dist restrictions must not combine
    # source distributions and dist-specific wheels, as they are not
    # guaranteed to be locally compatible.
    if dist_restriction_set and sdist_dependencies_allowed:
        raise CommandError(
            "When restricting platform and interpreter constraints using "
            "--python-version, --platform, --abi, or --implementation, "
            "either --no-deps must be set, or --only-binary=:all: must be "
            "set and --no-binary must not be set (or must be set to "
            ":none:)."
        )

    if check_target and dist_restriction_set:
        if not options.dry_run and not options.target_dir:
            raise CommandError(
                "Can not use any platform or abi specific options unless "
                "installing via '--target' or using '--dry-run'"
            )
100
+
101
+
102
def _path_option_check(option: Option, opt: str, value: str) -> str:
    # Expand a leading "~" so user-supplied paths behave as they do in a shell.
    expanded = os.path.expanduser(value)
    return expanded
104
+
105
+
106
def _package_name_option_check(option: Option, opt: str, value: str) -> str:
    # Normalize the project name (PEP 503 canonical form) so later
    # comparisons are case- and separator-insensitive.
    return canonicalize_name(value)
108
+
109
+
110
class PipOption(Option):
    """An optparse ``Option`` extended with pip-specific value types."""

    TYPES = Option.TYPES + ("path", "package_name")
    TYPE_CHECKER = Option.TYPE_CHECKER.copy()
    # "package_name" values are canonicalized (PEP 503); "path" values have
    # "~" expanded to the user's home directory.
    TYPE_CHECKER["package_name"] = _package_name_option_check
    TYPE_CHECKER["path"] = _path_option_check
115
+
116
+
117
###########
# options #
###########

# Each name below is a *factory* (functools.partial) rather than an Option
# instance, per the module docstring: options with action="append" would
# otherwise carry state between parses.

help_: Callable[..., Option] = partial(
    Option,
    "-h",
    "--help",
    dest="help",
    action="help",
    help="Show help.",
)

debug_mode: Callable[..., Option] = partial(
    Option,
    "--debug",
    dest="debug_mode",
    action="store_true",
    default=False,
    help=(
        "Let unhandled exceptions propagate outside the main subroutine, "
        "instead of logging them to stderr."
    ),
)

isolated_mode: Callable[..., Option] = partial(
    Option,
    "--isolated",
    dest="isolated_mode",
    action="store_true",
    default=False,
    help=(
        "Run pip in an isolated mode, ignoring environment variables and user "
        "configuration."
    ),
)

require_virtualenv: Callable[..., Option] = partial(
    Option,
    "--require-virtualenv",
    "--require-venv",
    dest="require_venv",
    action="store_true",
    default=False,
    help=(
        "Allow pip to only run in a virtual environment; "
        "exit with an error otherwise."
    ),
)

override_externally_managed: Callable[..., Option] = partial(
    Option,
    "--break-system-packages",
    dest="override_externally_managed",
    action="store_true",
    help="Allow pip to modify an EXTERNALLY-MANAGED Python installation",
)

python: Callable[..., Option] = partial(
    Option,
    "--python",
    dest="python",
    help="Run pip with the specified Python interpreter.",
)

# "count" action: each -v increments verbosity by one.
verbose: Callable[..., Option] = partial(
    Option,
    "-v",
    "--verbose",
    dest="verbose",
    action="count",
    default=0,
    help="Give more output. Option is additive, and can be used up to 3 times.",
)

no_color: Callable[..., Option] = partial(
    Option,
    "--no-color",
    dest="no_color",
    action="store_true",
    default=False,
    help="Suppress colored output.",
)

version: Callable[..., Option] = partial(
    Option,
    "-V",
    "--version",
    dest="version",
    action="store_true",
    help="Show version and exit.",
)

# Counterpart of "verbose"; each -q decrements the effective log level.
quiet: Callable[..., Option] = partial(
    Option,
    "-q",
    "--quiet",
    dest="quiet",
    action="count",
    default=0,
    help=(
        "Give less output. Option is additive, and can be used up to 3"
        " times (corresponding to WARNING, ERROR, and CRITICAL logging"
        " levels)."
    ),
)

progress_bar: Callable[..., Option] = partial(
    Option,
    "--progress-bar",
    dest="progress_bar",
    type="choice",
    choices=["on", "off"],
    default="on",
    help="Specify whether the progress bar should be used [on, off] (default: on)",
)

# Uses PipOption's "path" type so "~" in the log path is expanded.
log: Callable[..., Option] = partial(
    PipOption,
    "--log",
    "--log-file",
    "--local-log",
    dest="log",
    metavar="path",
    type="path",
    help="Path to a verbose appending log.",
)

no_input: Callable[..., Option] = partial(
    Option,
    # Don't ask for input
    "--no-input",
    dest="no_input",
    action="store_true",
    default=False,
    help="Disable prompting for input.",
)
254
+
255
keyring_provider: Callable[..., Option] = partial(
    Option,
    "--keyring-provider",
    dest="keyring_provider",
    choices=["auto", "disabled", "import", "subprocess"],
    default="auto",
    help=(
        "Enable the credential lookup via the keyring library if user input is allowed."
        " Specify which mechanism to use [disabled, import, subprocess]."
        # Bug fix: the help previously claimed "(default: disabled)" even
        # though the actual default above is "auto".
        " (default: auto)"
    ),
)
267
+
268
+ proxy: Callable[..., Option] = partial(
269
+ Option,
270
+ "--proxy",
271
+ dest="proxy",
272
+ type="str",
273
+ default="",
274
+ help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.",
275
+ )
276
+
277
+ retries: Callable[..., Option] = partial(
278
+ Option,
279
+ "--retries",
280
+ dest="retries",
281
+ type="int",
282
+ default=5,
283
+ help="Maximum number of retries each connection should attempt "
284
+ "(default %default times).",
285
+ )
286
+
287
+ timeout: Callable[..., Option] = partial(
288
+ Option,
289
+ "--timeout",
290
+ "--default-timeout",
291
+ metavar="sec",
292
+ dest="timeout",
293
+ type="float",
294
+ default=15,
295
+ help="Set the socket timeout (default %default seconds).",
296
+ )
297
+
298
+
299
def exists_action() -> Option:
    """Build the repeatable ``--exists-action`` option.

    Selects pip's default behaviour when a target path already exists.
    """
    help_text = (
        "Default action when a path already exists: "
        "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort."
    )
    return Option(
        "--exists-action",
        dest="exists_action",
        metavar="action",
        type="choice",
        choices=["s", "i", "w", "b", "a"],
        action="append",
        default=[],
        help=help_text,
    )
312
+
313
+
314
+ cert: Callable[..., Option] = partial(
315
+ PipOption,
316
+ "--cert",
317
+ dest="cert",
318
+ type="path",
319
+ metavar="path",
320
+ help=(
321
+ "Path to PEM-encoded CA certificate bundle. "
322
+ "If provided, overrides the default. "
323
+ "See 'SSL Certificate Verification' in pip documentation "
324
+ "for more information."
325
+ ),
326
+ )
327
+
328
+ client_cert: Callable[..., Option] = partial(
329
+ PipOption,
330
+ "--client-cert",
331
+ dest="client_cert",
332
+ type="path",
333
+ default=None,
334
+ metavar="path",
335
+ help="Path to SSL client certificate, a single file containing the "
336
+ "private key and the certificate in PEM format.",
337
+ )
338
+
339
+ index_url: Callable[..., Option] = partial(
340
+ Option,
341
+ "-i",
342
+ "--index-url",
343
+ "--pypi-url",
344
+ dest="index_url",
345
+ metavar="URL",
346
+ default=PyPI.simple_url,
347
+ help="Base URL of the Python Package Index (default %default). "
348
+ "This should point to a repository compliant with PEP 503 "
349
+ "(the simple repository API) or a local directory laid out "
350
+ "in the same format.",
351
+ )
352
+
353
+
354
def extra_index_url() -> Option:
    """Build the repeatable ``--extra-index-url`` option."""
    help_text = (
        "Extra URLs of package indexes to use in addition to "
        "--index-url. Should follow the same rules as "
        "--index-url."
    )
    return Option(
        "--extra-index-url",
        dest="extra_index_urls",
        metavar="URL",
        default=[],
        action="append",
        help=help_text,
    )
365
+
366
+
367
+ no_index: Callable[..., Option] = partial(
368
+ Option,
369
+ "--no-index",
370
+ dest="no_index",
371
+ action="store_true",
372
+ default=False,
373
+ help="Ignore package index (only looking at --find-links URLs instead).",
374
+ )
375
+
376
+
377
def find_links() -> Option:
    """Build the repeatable ``-f``/``--find-links`` option."""
    help_text = (
        "If a URL or path to an html file, then parse for links to "
        "archives such as sdist (.tar.gz) or wheel (.whl) files. "
        "If a local path or file:// URL that's a directory, "
        "then look for archives in the directory listing. "
        "Links to VCS project URLs are not supported."
    )
    return Option(
        "-f",
        "--find-links",
        dest="find_links",
        metavar="url",
        action="append",
        default=[],
        help=help_text,
    )
391
+
392
+
393
def trusted_host() -> Option:
    """Build the repeatable ``--trusted-host`` option."""
    help_text = (
        "Mark this host or host:port pair as trusted, even though it "
        "does not have valid or any HTTPS."
    )
    return Option(
        "--trusted-host",
        dest="trusted_hosts",
        metavar="HOSTNAME",
        action="append",
        default=[],
        help=help_text,
    )
403
+
404
+
405
def constraints() -> Option:
    """Build the repeatable ``-c``/``--constraint`` option."""
    help_text = (
        "Constrain versions using the given constraints file. "
        "This option can be used multiple times."
    )
    return Option(
        "-c",
        "--constraint",
        dest="constraints",
        metavar="file",
        action="append",
        default=[],
        help=help_text,
    )
416
+
417
+
418
def requirements() -> Option:
    """Build the repeatable ``-r``/``--requirement`` option."""
    help_text = (
        "Install from the given requirements file. "
        "This option can be used multiple times."
    )
    return Option(
        "-r",
        "--requirement",
        dest="requirements",
        metavar="file",
        action="append",
        default=[],
        help=help_text,
    )
429
+
430
+
431
def editable() -> Option:
    """Build the repeatable ``-e``/``--editable`` option."""
    help_text = (
        "Install a project in editable mode (i.e. setuptools "
        '"develop mode") from a local project path or a VCS url.'
    )
    return Option(
        "-e",
        "--editable",
        dest="editables",
        metavar="path/url",
        action="append",
        default=[],
        help=help_text,
    )
444
+
445
+
446
def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None:
    """optparse callback for --src: store the value as an absolute path."""
    setattr(parser.values, option.dest, os.path.abspath(value))
449
+
450
+
451
+ src: Callable[..., Option] = partial(
452
+ PipOption,
453
+ "--src",
454
+ "--source",
455
+ "--source-dir",
456
+ "--source-directory",
457
+ dest="src_dir",
458
+ type="path",
459
+ metavar="dir",
460
+ default=get_src_prefix(),
461
+ action="callback",
462
+ callback=_handle_src,
463
+ help="Directory to check out editable projects into. "
464
+ 'The default in a virtualenv is "<venv path>/src". '
465
+ 'The default for global installs is "<current dir>/src".',
466
+ )
467
+
468
+
469
def _get_format_control(values: Values, option: Option) -> Any:
    """Get a format_control object."""
    # The option's ``dest`` names the attribute on the parsed-values object
    # that holds the shared format-control state for --no-binary/--only-binary.
    return getattr(values, option.dest)
472
+
473
+
474
def _handle_no_binary(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    """optparse callback for --no-binary.

    Merges ``value`` into the shared FormatControl's no-binary set while
    removing it from the mutually exclusive only-binary set.
    """
    fmt = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(value, fmt.no_binary, fmt.only_binary)
483
+
484
+
485
def _handle_only_binary(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    """optparse callback for --only-binary.

    Merges ``value`` into the shared FormatControl's only-binary set while
    removing it from the mutually exclusive no-binary set.
    """
    fmt = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(value, fmt.only_binary, fmt.no_binary)
494
+
495
+
496
def no_binary() -> Option:
    """Build the ``--no-binary`` option.

    Each use accumulates into a fresh FormatControl default via the
    ``_handle_no_binary`` callback.
    """
    help_text = (
        "Do not use binary packages. Can be supplied multiple times, and "
        'each time adds to the existing value. Accepts either ":all:" to '
        'disable all binary packages, ":none:" to empty the set (notice '
        "the colons), or one or more package names with commas between "
        "them (no colons). Note that some packages are tricky to compile "
        "and may fail to install when this option is used on them."
    )
    return Option(
        "--no-binary",
        dest="format_control",
        action="callback",
        callback=_handle_no_binary,
        type="str",
        default=FormatControl(set(), set()),
        help=help_text,
    )
512
+
513
+
514
def only_binary() -> Option:
    """Build the ``--only-binary`` option.

    Each use accumulates into a fresh FormatControl default via the
    ``_handle_only_binary`` callback.
    """
    help_text = (
        "Do not use source packages. Can be supplied multiple times, and "
        'each time adds to the existing value. Accepts either ":all:" to '
        'disable all source packages, ":none:" to empty the set, or one '
        "or more package names with commas between them. Packages "
        "without binary distributions will fail to install when this "
        "option is used on them."
    )
    return Option(
        "--only-binary",
        dest="format_control",
        action="callback",
        callback=_handle_only_binary,
        type="str",
        default=FormatControl(set(), set()),
        help=help_text,
    )
530
+
531
+
532
+ platforms: Callable[..., Option] = partial(
533
+ Option,
534
+ "--platform",
535
+ dest="platforms",
536
+ metavar="platform",
537
+ action="append",
538
+ default=None,
539
+ help=(
540
+ "Only use wheels compatible with <platform>. Defaults to the "
541
+ "platform of the running system. Use this option multiple times to "
542
+ "specify multiple platforms supported by the target interpreter."
543
+ ),
544
+ )
545
+
546
+
547
+ # This was made a separate function for unit-testing purposes.
548
# This was made a separate function for unit-testing purposes.
def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
    """
    Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.

    :return: A 2-tuple (version_info, error_msg), where `error_msg` is
        non-None if and only if there was a parsing error.
    """
    if not value:
        # An empty string behaves the same as an omitted value.
        return (None, None)

    parts = value.split(".")
    if len(parts) > 3:
        return ((), "at most three version parts are allowed")

    if len(parts) == 1 and len(parts[0]) > 1:
        # A bare multi-digit value like "37" means major 3, minor 7.
        parts = [parts[0][0], parts[0][1:]]

    try:
        return (tuple(int(part) for part in parts), None)
    except ValueError:
        return ((), "each version part must be an integer")
575
+
576
+
577
def _handle_python_version(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    """
    Handle a provided --python-version value.
    """
    version_info, error_msg = _convert_python_version(value)
    if error_msg is not None:
        raise_option_error(
            parser,
            option=option,
            msg=f"invalid --python-version value: {value!r}: {error_msg}",
        )

    parser.values.python_version = version_info
589
+
590
+
591
+ python_version: Callable[..., Option] = partial(
592
+ Option,
593
+ "--python-version",
594
+ dest="python_version",
595
+ metavar="python_version",
596
+ action="callback",
597
+ callback=_handle_python_version,
598
+ type="str",
599
+ default=None,
600
+ help=dedent(
601
+ """\
602
+ The Python interpreter version to use for wheel and "Requires-Python"
603
+ compatibility checks. Defaults to a version derived from the running
604
+ interpreter. The version can be specified using up to three dot-separated
605
+ integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
606
+ version can also be given as a string without dots (e.g. "37" for 3.7.0).
607
+ """
608
+ ),
609
+ )
610
+
611
+
612
+ implementation: Callable[..., Option] = partial(
613
+ Option,
614
+ "--implementation",
615
+ dest="implementation",
616
+ metavar="implementation",
617
+ default=None,
618
+ help=(
619
+ "Only use wheels compatible with Python "
620
+ "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
621
+ " or 'ip'. If not specified, then the current "
622
+ "interpreter implementation is used. Use 'py' to force "
623
+ "implementation-agnostic wheels."
624
+ ),
625
+ )
626
+
627
+
628
+ abis: Callable[..., Option] = partial(
629
+ Option,
630
+ "--abi",
631
+ dest="abis",
632
+ metavar="abi",
633
+ action="append",
634
+ default=None,
635
+ help=(
636
+ "Only use wheels compatible with Python abi <abi>, e.g. 'pypy_41'. "
637
+ "If not specified, then the current interpreter abi tag is used. "
638
+ "Use this option multiple times to specify multiple abis supported "
639
+ "by the target interpreter. Generally you will need to specify "
640
+ "--implementation, --platform, and --python-version when using this "
641
+ "option."
642
+ ),
643
+ )
644
+
645
+
646
def add_target_python_options(cmd_opts: OptionGroup) -> None:
    """Register the four target-python selection options on ``cmd_opts``.

    Order matters for --help output: platform, python-version,
    implementation, then abi.
    """
    for make_option in (platforms, python_version, implementation, abis):
        cmd_opts.add_option(make_option())
651
+
652
+
653
def make_target_python(options: Values) -> TargetPython:
    """Build a TargetPython from the parsed target-python options."""
    return TargetPython(
        platforms=options.platforms,
        py_version_info=options.python_version,
        abis=options.abis,
        implementation=options.implementation,
    )
662
+
663
+
664
def prefer_binary() -> Option:
    """Build the ``--prefer-binary`` flag (favour wheels over newer sdists)."""
    help_text = (
        "Prefer binary packages over source packages, even if the "
        "source packages are newer."
    )
    return Option(
        "--prefer-binary",
        dest="prefer_binary",
        action="store_true",
        default=False,
        help=help_text,
    )
675
+
676
+
677
+ cache_dir: Callable[..., Option] = partial(
678
+ PipOption,
679
+ "--cache-dir",
680
+ dest="cache_dir",
681
+ default=USER_CACHE_DIR,
682
+ metavar="dir",
683
+ type="path",
684
+ help="Store the cache data in <dir>.",
685
+ )
686
+
687
+
688
def _handle_no_cache_dir(
    option: Option, opt: str, value: str, parser: OptionParser
) -> None:
    """
    Process a value provided for the --no-cache-dir option.

    This is an optparse.Option callback for the --no-cache-dir option.
    """
    # The value argument will be None if --no-cache-dir is passed via the
    # command-line, since the option doesn't accept arguments. However,
    # the value can be non-None if the option is triggered e.g. by an
    # environment variable, like PIP_NO_CACHE_DIR=true.
    if value is not None:
        # Then parse the string value to get argument error-checking.
        # The parsed boolean itself is deliberately discarded (see below).
        try:
            strtobool(value)
        except ValueError as exc:
            raise_option_error(parser, option=option, msg=str(exc))

    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
    # rather than enabled (logic would say the latter). Thus, we disable
    # the cache directory not just on values that parse to True, but (for
    # backwards compatibility reasons) also on values that parse to False.
    # In other words, always set it to False if the option is provided in
    # some (valid) form.
    parser.values.cache_dir = False
715
+
716
+
717
+ no_cache: Callable[..., Option] = partial(
718
+ Option,
719
+ "--no-cache-dir",
720
+ dest="cache_dir",
721
+ action="callback",
722
+ callback=_handle_no_cache_dir,
723
+ help="Disable the cache.",
724
+ )
725
+
726
+ no_deps: Callable[..., Option] = partial(
727
+ Option,
728
+ "--no-deps",
729
+ "--no-dependencies",
730
+ dest="ignore_dependencies",
731
+ action="store_true",
732
+ default=False,
733
+ help="Don't install package dependencies.",
734
+ )
735
+
736
+ ignore_requires_python: Callable[..., Option] = partial(
737
+ Option,
738
+ "--ignore-requires-python",
739
+ dest="ignore_requires_python",
740
+ action="store_true",
741
+ help="Ignore the Requires-Python information.",
742
+ )
743
+
744
+ no_build_isolation: Callable[..., Option] = partial(
745
+ Option,
746
+ "--no-build-isolation",
747
+ dest="build_isolation",
748
+ action="store_false",
749
+ default=True,
750
+ help="Disable isolation when building a modern source distribution. "
751
+ "Build dependencies specified by PEP 518 must be already installed "
752
+ "if this option is used.",
753
+ )
754
+
755
+ check_build_deps: Callable[..., Option] = partial(
756
+ Option,
757
+ "--check-build-dependencies",
758
+ dest="check_build_deps",
759
+ action="store_true",
760
+ default=False,
761
+ help="Check the build dependencies when PEP517 is used.",
762
+ )
763
+
764
+
765
def _handle_no_use_pep517(
    option: Option, opt: str, value: str, parser: OptionParser
) -> None:
    """
    Process a value provided for the --no-use-pep517 option.

    This is an optparse.Option callback for the no_use_pep517 option.
    """
    # Since --no-use-pep517 doesn't accept arguments, the value argument
    # will be None if --no-use-pep517 is passed via the command-line.
    # However, the value can be non-None if the option is triggered e.g.
    # by an environment variable, for example "PIP_NO_USE_PEP517=true".
    if value is not None:
        msg = """A value was passed for --no-use-pep517,
        probably using either the PIP_NO_USE_PEP517 environment variable
        or the "no-use-pep517" config file option. Use an appropriate value
        of the PIP_USE_PEP517 environment variable or the "use-pep517"
        config file option instead.
        """
        # raise_option_error aborts with a usage error; execution stops here.
        raise_option_error(parser, option=option, msg=msg)

    # If user doesn't wish to use pep517, we check if setuptools and wheel are installed
    # and raise error if it is not.
    packages = ("setuptools", "wheel")
    if not all(importlib.util.find_spec(package) for package in packages):
        msg = (
            f"It is not possible to use --no-use-pep517 "
            f"without {' and '.join(packages)} installed."
        )
        raise_option_error(parser, option=option, msg=msg)

    # Otherwise, --no-use-pep517 was passed via the command-line.
    parser.values.use_pep517 = False
798
+
799
+
800
+ use_pep517: Any = partial(
801
+ Option,
802
+ "--use-pep517",
803
+ dest="use_pep517",
804
+ action="store_true",
805
+ default=None,
806
+ help="Use PEP 517 for building source distributions "
807
+ "(use --no-use-pep517 to force legacy behaviour).",
808
+ )
809
+
810
+ no_use_pep517: Any = partial(
811
+ Option,
812
+ "--no-use-pep517",
813
+ dest="use_pep517",
814
+ action="callback",
815
+ callback=_handle_no_use_pep517,
816
+ default=None,
817
+ help=SUPPRESS_HELP,
818
+ )
819
+
820
+
821
def _handle_config_settings(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    """optparse callback for -C/--config-settings: accumulate KEY=VAL pairs.

    A repeated key collects its values into a list, preserving order.
    """
    key, sep, val = value.partition("=")
    if sep != "=":
        parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")
    settings = getattr(parser.values, option.dest)
    if settings is None:
        # First occurrence: create the dict lazily.
        settings = {}
        setattr(parser.values, option.dest, settings)
    if key not in settings:
        settings[key] = val
    elif isinstance(settings[key], list):
        settings[key].append(val)
    else:
        # Second occurrence of this key: promote the scalar to a list.
        settings[key] = [settings[key], val]
838
+
839
+
840
+ config_settings: Callable[..., Option] = partial(
841
+ Option,
842
+ "-C",
843
+ "--config-settings",
844
+ dest="config_settings",
845
+ type=str,
846
+ action="callback",
847
+ callback=_handle_config_settings,
848
+ metavar="settings",
849
+ help="Configuration settings to be passed to the PEP 517 build backend. "
850
+ "Settings take the form KEY=VALUE. Use multiple --config-settings options "
851
+ "to pass multiple keys to the backend.",
852
+ )
853
+
854
+ build_options: Callable[..., Option] = partial(
855
+ Option,
856
+ "--build-option",
857
+ dest="build_options",
858
+ metavar="options",
859
+ action="append",
860
+ help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
861
+ )
862
+
863
+ global_options: Callable[..., Option] = partial(
864
+ Option,
865
+ "--global-option",
866
+ dest="global_options",
867
+ action="append",
868
+ metavar="options",
869
+ help="Extra global options to be supplied to the setup.py "
870
+ "call before the install or bdist_wheel command.",
871
+ )
872
+
873
+ no_clean: Callable[..., Option] = partial(
874
+ Option,
875
+ "--no-clean",
876
+ action="store_true",
877
+ default=False,
878
+ help="Don't clean up build directories.",
879
+ )
880
+
881
+ pre: Callable[..., Option] = partial(
882
+ Option,
883
+ "--pre",
884
+ action="store_true",
885
+ default=False,
886
+ help="Include pre-release and development versions. By default, "
887
+ "pip only finds stable versions.",
888
+ )
889
+
890
+ disable_pip_version_check: Callable[..., Option] = partial(
891
+ Option,
892
+ "--disable-pip-version-check",
893
+ dest="disable_pip_version_check",
894
+ action="store_true",
895
+ default=False,
896
+ help="Don't periodically check PyPI to determine whether a new version "
897
+ "of pip is available for download. Implied with --no-index.",
898
+ )
899
+
900
+ root_user_action: Callable[..., Option] = partial(
901
+ Option,
902
+ "--root-user-action",
903
+ dest="root_user_action",
904
+ default="warn",
905
+ choices=["warn", "ignore"],
906
+ help="Action if pip is run as a root user. By default, a warning message is shown.",
907
+ )
908
+
909
+
910
def _handle_merge_hash(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    """Given a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name."""
    if not parser.values.hashes:
        parser.values.hashes = {}
    algo, sep, digest = value.partition(":")
    if not sep:
        # No colon at all: the value cannot name an algorithm.
        parser.error(
            f"Arguments to {opt_str} must be a hash name "
            "followed by a value, like --hash=sha256:"
            "abcde..."
        )
    if algo not in STRONG_HASHES:
        parser.error(
            f"Allowed hash algorithms for {opt_str} are {', '.join(STRONG_HASHES)}."
        )
    parser.values.hashes.setdefault(algo, []).append(digest)
932
+
933
+
934
+ hash: Callable[..., Option] = partial(
935
+ Option,
936
+ "--hash",
937
+ # Hash values eventually end up in InstallRequirement.hashes due to
938
+ # __dict__ copying in process_line().
939
+ dest="hashes",
940
+ action="callback",
941
+ callback=_handle_merge_hash,
942
+ type="string",
943
+ help="Verify that the package's archive matches this "
944
+ "hash before installing. Example: --hash=sha256:abcdef...",
945
+ )
946
+
947
+
948
+ require_hashes: Callable[..., Option] = partial(
949
+ Option,
950
+ "--require-hashes",
951
+ dest="require_hashes",
952
+ action="store_true",
953
+ default=False,
954
+ help="Require a hash to check each requirement against, for "
955
+ "repeatable installs. This option is implied when any package in a "
956
+ "requirements file has a --hash option.",
957
+ )
958
+
959
+
960
+ list_path: Callable[..., Option] = partial(
961
+ PipOption,
962
+ "--path",
963
+ dest="path",
964
+ type="path",
965
+ action="append",
966
+ help="Restrict to the specified installation path for listing "
967
+ "packages (can be used multiple times).",
968
+ )
969
+
970
+
971
def check_list_path_option(options: Values) -> None:
    """Raise CommandError when --path is combined with --user or --local."""
    scoped_to_env = options.user or options.local
    if options.path and scoped_to_env:
        raise CommandError("Cannot combine '--path' with '--user' or '--local'")
974
+
975
+
976
+ list_exclude: Callable[..., Option] = partial(
977
+ PipOption,
978
+ "--exclude",
979
+ dest="excludes",
980
+ action="append",
981
+ metavar="package",
982
+ type="package_name",
983
+ help="Exclude specified package from the output",
984
+ )
985
+
986
+
987
+ no_python_version_warning: Callable[..., Option] = partial(
988
+ Option,
989
+ "--no-python-version-warning",
990
+ dest="no_python_version_warning",
991
+ action="store_true",
992
+ default=False,
993
+ help="Silence deprecation warnings for upcoming unsupported Pythons.",
994
+ )
995
+
996
+
997
+ # Features that are now always on. A warning is printed if they are used.
998
+ ALWAYS_ENABLED_FEATURES = [
999
+ "no-binary-enable-wheel-cache", # always on since 23.1
1000
+ ]
1001
+
1002
+ use_new_feature: Callable[..., Option] = partial(
1003
+ Option,
1004
+ "--use-feature",
1005
+ dest="features_enabled",
1006
+ metavar="feature",
1007
+ action="append",
1008
+ default=[],
1009
+ choices=[
1010
+ "fast-deps",
1011
+ "truststore",
1012
+ ]
1013
+ + ALWAYS_ENABLED_FEATURES,
1014
+ help="Enable new functionality, that may be backward incompatible.",
1015
+ )
1016
+
1017
+ use_deprecated_feature: Callable[..., Option] = partial(
1018
+ Option,
1019
+ "--use-deprecated",
1020
+ dest="deprecated_features_enabled",
1021
+ metavar="feature",
1022
+ action="append",
1023
+ default=[],
1024
+ choices=[
1025
+ "legacy-resolver",
1026
+ ],
1027
+ help=("Enable deprecated functionality, that will be removed in the future."),
1028
+ )
1029
+
1030
+
1031
+ ##########
1032
+ # groups #
1033
+ ##########
1034
+
1035
+ general_group: Dict[str, Any] = {
1036
+ "name": "General Options",
1037
+ "options": [
1038
+ help_,
1039
+ debug_mode,
1040
+ isolated_mode,
1041
+ require_virtualenv,
1042
+ python,
1043
+ verbose,
1044
+ version,
1045
+ quiet,
1046
+ log,
1047
+ no_input,
1048
+ keyring_provider,
1049
+ proxy,
1050
+ retries,
1051
+ timeout,
1052
+ exists_action,
1053
+ trusted_host,
1054
+ cert,
1055
+ client_cert,
1056
+ cache_dir,
1057
+ no_cache,
1058
+ disable_pip_version_check,
1059
+ no_color,
1060
+ no_python_version_warning,
1061
+ use_new_feature,
1062
+ use_deprecated_feature,
1063
+ ],
1064
+ }
1065
+
1066
+ index_group: Dict[str, Any] = {
1067
+ "name": "Package Index Options",
1068
+ "options": [
1069
+ index_url,
1070
+ extra_index_url,
1071
+ no_index,
1072
+ find_links,
1073
+ ],
1074
+ }
.venv/lib/python3.11/site-packages/pip/_internal/cli/command_context.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from contextlib import ExitStack, contextmanager
2
+ from typing import ContextManager, Generator, TypeVar
3
+
4
_T = TypeVar("_T", covariant=True)


class CommandContextMixIn:
    """Mix-in providing a single shared ExitStack ("main context").

    A command enters the main context once; resources registered through
    enter_context() are released when the main context exits.
    """

    def __init__(self) -> None:
        super().__init__()
        # True only while main_context() is active; guards against re-entry.
        self._in_main_context = False
        self._main_context = ExitStack()

    @contextmanager
    def main_context(self) -> Generator[None, None, None]:
        # The main context is not re-entrant: entering it twice is a bug.
        assert not self._in_main_context

        self._in_main_context = True
        try:
            with self._main_context:
                yield
        finally:
            self._in_main_context = False

    def enter_context(self, context_provider: ContextManager[_T]) -> _T:
        # Only valid while inside main_context().
        assert self._in_main_context

        return self._main_context.enter_context(context_provider)
.venv/lib/python3.11/site-packages/pip/_internal/cli/main.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Primary application entrypoint.
2
+ """
3
+ import locale
4
+ import logging
5
+ import os
6
+ import sys
7
+ import warnings
8
+ from typing import List, Optional
9
+
10
+ from pip._internal.cli.autocompletion import autocomplete
11
+ from pip._internal.cli.main_parser import parse_command
12
+ from pip._internal.commands import create_command
13
+ from pip._internal.exceptions import PipError
14
+ from pip._internal.utils import deprecation
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
+
19
+ # Do not import and use main() directly! Using it directly is actively
20
+ # discouraged by pip's maintainers. The name, location and behavior of
21
+ # this function is subject to change, so calling it directly is not
22
+ # portable across different pip versions.
23
+
24
+ # In addition, running pip in-process is unsupported and unsafe. This is
25
+ # elaborated in detail at
26
+ # https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
27
+ # That document also provides suggestions that should work for nearly
28
+ # all users that are considering importing and using main() directly.
29
+
30
+ # However, we know that certain users will still want to invoke pip
31
+ # in-process. If you understand and accept the implications of using pip
32
+ # in an unsupported manner, the best approach is to use runpy to avoid
33
+ # depending on the exact location of this entry point.
34
+
35
+ # The following example shows how to use runpy to invoke pip in that
36
+ # case:
37
+ #
38
+ # sys.argv = ["pip", your, args, here]
39
+ # runpy.run_module("pip", run_name="__main__")
40
+ #
41
+ # Note that this will exit the process after running, unlike a direct
42
+ # call to main. As it is not safe to do any processing after calling
43
+ # main, this should not be an issue in practice.
44
+
45
+
46
def main(args: Optional[List[str]] = None) -> int:
    """Parse the command line, dispatch to a subcommand, and return its
    exit status.

    :param args: command-line arguments; defaults to ``sys.argv[1:]``.
    """
    if args is None:
        args = sys.argv[1:]

    # Suppress the pkg_resources deprecation warning
    # Note - we use a module of .*pkg_resources to cover
    # the normal case (pip._vendor.pkg_resources) and the
    # devendored case (a bare pkg_resources)
    warnings.filterwarnings(
        action="ignore", category=DeprecationWarning, module=".*pkg_resources"
    )

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    autocomplete()

    try:
        cmd_name, cmd_args = parse_command(args)
    except PipError as exc:
        sys.stderr.write(f"ERROR: {exc}")
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, "")
    except locale.Error as e:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    command = create_command(cmd_name, isolated=("--isolated" in cmd_args))

    return command.main(cmd_args)
.venv/lib/python3.11/site-packages/pip/_internal/cli/main_parser.py ADDED
@@ -0,0 +1,134 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """A single place for constructing and exposing the main parser
2
+ """
3
+
4
+ import os
5
+ import subprocess
6
+ import sys
7
+ from typing import List, Optional, Tuple
8
+
9
+ from pip._internal.build_env import get_runnable_pip
10
+ from pip._internal.cli import cmdoptions
11
+ from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
12
+ from pip._internal.commands import commands_dict, get_similar_commands
13
+ from pip._internal.exceptions import CommandError
14
+ from pip._internal.utils.misc import get_pip_version, get_prog
15
+
16
+ __all__ = ["create_main_parser", "parse_command"]
17
+
18
+
19
def create_main_parser() -> ConfigOptionParser:
    """Creates and returns the main parser for pip's CLI"""

    parser = ConfigOptionParser(
        usage="\n%prog <command> [options]",
        add_help_option=False,
        formatter=UpdatingDefaultsHelpFormatter(),
        name="global",
        prog=get_prog(),
    )
    # Stop option parsing at the first positional (the subcommand name).
    parser.disable_interspersed_args()

    parser.version = get_pip_version()

    # Attach the shared general-options group.
    parser.add_option_group(
        cmdoptions.make_option_group(cmdoptions.general_group, parser)
    )

    # so the help formatter knows
    parser.main = True  # type: ignore

    # Build the command listing shown in the description.
    summaries = [
        f"{name:27} {command_info.summary}"
        for name, command_info in commands_dict.items()
    ]
    parser.description = "\n".join([""] + summaries)

    return parser
+ return parser
48
+
49
+
50
def identify_python_interpreter(python: str) -> Optional[str]:
    """Resolve ``python`` to an interpreter executable path.

    An existing file path is returned as-is. A directory is assumed to be
    a virtual environment and is searched for its interpreter. Returns
    None when nothing suitable exists.
    """
    if not os.path.exists(python):
        # Could not find the interpreter specified
        return None

    if not os.path.isdir(python):
        return python

    # bin/python for Unix, Scripts/python.exe for Windows.
    # Try both in case of odd cases like cygwin.
    for exe in ("bin/python", "Scripts/python.exe"):
        candidate = os.path.join(python, exe)
        if os.path.exists(candidate):
            return candidate

    return None
+
68
+
69
def parse_command(args: List[str]) -> Tuple[str, List[str]]:
    """Split ``args`` into a subcommand name and its remaining arguments.

    Global-only options are handled here: ``--python`` re-executes pip
    under another interpreter and exits; ``--version`` prints and exits;
    no subcommand (or a bare ``help``) prints the overall help and exits.

    :raises CommandError: if the subcommand is unknown, or --python does
        not resolve to an interpreter.
    """
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout=5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --python
    if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
        # Re-invoke pip using the specified Python interpreter
        interpreter = identify_python_interpreter(general_options.python)
        if interpreter is None:
            raise CommandError(
                f"Could not locate Python interpreter {general_options.python}"
            )

        pip_cmd = [
            interpreter,
            get_runnable_pip(),
        ]
        pip_cmd.extend(args)

        # Set a flag so the child doesn't re-invoke itself, causing
        # an infinite loop.
        os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
        returncode = 0
        try:
            proc = subprocess.run(pip_cmd)
            returncode = proc.returncode
        except (subprocess.SubprocessError, OSError) as exc:
            raise CommandError(f"Failed to run pip under {interpreter}: {exc}")
        sys.exit(returncode)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == "help" and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = [f'unknown command "{cmd_name}"']
        if guess:
            msg.append(f'maybe you meant "{guess}"')

        raise CommandError(" - ".join(msg))

    # all the args without the subcommand
    # (list.remove drops only the first occurrence, which is the command)
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
.venv/lib/python3.11/site-packages/pip/_internal/cli/parser.py ADDED
@@ -0,0 +1,294 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Base option parser setup"""
2
+
3
+ import logging
4
+ import optparse
5
+ import shutil
6
+ import sys
7
+ import textwrap
8
+ from contextlib import suppress
9
+ from typing import Any, Dict, Generator, List, Tuple
10
+
11
+ from pip._internal.cli.status_codes import UNKNOWN_ERROR
12
+ from pip._internal.configuration import Configuration, ConfigurationError
13
+ from pip._internal.utils.misc import redact_auth_from_url, strtobool
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
+ class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
19
+ """A prettier/less verbose help formatter for optparse."""
20
+
21
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
22
+ # help position must be aligned with __init__.parseopts.description
23
+ kwargs["max_help_position"] = 30
24
+ kwargs["indent_increment"] = 1
25
+ kwargs["width"] = shutil.get_terminal_size()[0] - 2
26
+ super().__init__(*args, **kwargs)
27
+
28
+ def format_option_strings(self, option: optparse.Option) -> str:
29
+ return self._format_option_strings(option)
30
+
31
+ def _format_option_strings(
32
+ self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
33
+ ) -> str:
34
+ """
35
+ Return a comma-separated list of option strings and metavars.
36
+
37
+ :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
38
+ :param mvarfmt: metavar format string
39
+ :param optsep: separator
40
+ """
41
+ opts = []
42
+
43
+ if option._short_opts:
44
+ opts.append(option._short_opts[0])
45
+ if option._long_opts:
46
+ opts.append(option._long_opts[0])
47
+ if len(opts) > 1:
48
+ opts.insert(1, optsep)
49
+
50
+ if option.takes_value():
51
+ assert option.dest is not None
52
+ metavar = option.metavar or option.dest.lower()
53
+ opts.append(mvarfmt.format(metavar.lower()))
54
+
55
+ return "".join(opts)
56
+
57
+ def format_heading(self, heading: str) -> str:
58
+ if heading == "Options":
59
+ return ""
60
+ return heading + ":\n"
61
+
62
+ def format_usage(self, usage: str) -> str:
63
+ """
64
+ Ensure there is only one newline between usage and the first heading
65
+ if there is no description.
66
+ """
67
+ msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), " "))
68
+ return msg
69
+
70
+ def format_description(self, description: str) -> str:
71
+ # leave full control over description to us
72
+ if description:
73
+ if hasattr(self.parser, "main"):
74
+ label = "Commands"
75
+ else:
76
+ label = "Description"
77
+ # some doc strings have initial newlines, some don't
78
+ description = description.lstrip("\n")
79
+ # some doc strings have final newlines and spaces, some don't
80
+ description = description.rstrip()
81
+ # dedent, then reindent
82
+ description = self.indent_lines(textwrap.dedent(description), " ")
83
+ description = f"{label}:\n{description}\n"
84
+ return description
85
+ else:
86
+ return ""
87
+
88
+ def format_epilog(self, epilog: str) -> str:
89
+ # leave full control over epilog to us
90
+ if epilog:
91
+ return epilog
92
+ else:
93
+ return ""
94
+
95
+ def indent_lines(self, text: str, indent: str) -> str:
96
+ new_lines = [indent + line for line in text.split("\n")]
97
+ return "\n".join(new_lines)
98
+
99
+
100
+ class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
101
+ """Custom help formatter for use in ConfigOptionParser.
102
+
103
+ This is updates the defaults before expanding them, allowing
104
+ them to show up correctly in the help listing.
105
+
106
+ Also redact auth from url type options
107
+ """
108
+
109
+ def expand_default(self, option: optparse.Option) -> str:
110
+ default_values = None
111
+ if self.parser is not None:
112
+ assert isinstance(self.parser, ConfigOptionParser)
113
+ self.parser._update_defaults(self.parser.defaults)
114
+ assert option.dest is not None
115
+ default_values = self.parser.defaults.get(option.dest)
116
+ help_text = super().expand_default(option)
117
+
118
+ if default_values and option.metavar == "URL":
119
+ if isinstance(default_values, str):
120
+ default_values = [default_values]
121
+
122
+ # If its not a list, we should abort and just return the help text
123
+ if not isinstance(default_values, list):
124
+ default_values = []
125
+
126
+ for val in default_values:
127
+ help_text = help_text.replace(val, redact_auth_from_url(val))
128
+
129
+ return help_text
130
+
131
+
132
+ class CustomOptionParser(optparse.OptionParser):
133
+ def insert_option_group(
134
+ self, idx: int, *args: Any, **kwargs: Any
135
+ ) -> optparse.OptionGroup:
136
+ """Insert an OptionGroup at a given position."""
137
+ group = self.add_option_group(*args, **kwargs)
138
+
139
+ self.option_groups.pop()
140
+ self.option_groups.insert(idx, group)
141
+
142
+ return group
143
+
144
+ @property
145
+ def option_list_all(self) -> List[optparse.Option]:
146
+ """Get a list of all options, including those in option groups."""
147
+ res = self.option_list[:]
148
+ for i in self.option_groups:
149
+ res.extend(i.option_list)
150
+
151
+ return res
152
+
153
+
154
+ class ConfigOptionParser(CustomOptionParser):
155
+ """Custom option parser which updates its defaults by checking the
156
+ configuration files and environmental variables"""
157
+
158
+ def __init__(
159
+ self,
160
+ *args: Any,
161
+ name: str,
162
+ isolated: bool = False,
163
+ **kwargs: Any,
164
+ ) -> None:
165
+ self.name = name
166
+ self.config = Configuration(isolated)
167
+
168
+ assert self.name
169
+ super().__init__(*args, **kwargs)
170
+
171
+ def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
172
+ try:
173
+ return option.check_value(key, val)
174
+ except optparse.OptionValueError as exc:
175
+ print(f"An error occurred during configuration: {exc}")
176
+ sys.exit(3)
177
+
178
+ def _get_ordered_configuration_items(
179
+ self,
180
+ ) -> Generator[Tuple[str, Any], None, None]:
181
+ # Configuration gives keys in an unordered manner. Order them.
182
+ override_order = ["global", self.name, ":env:"]
183
+
184
+ # Pool the options into different groups
185
+ section_items: Dict[str, List[Tuple[str, Any]]] = {
186
+ name: [] for name in override_order
187
+ }
188
+ for section_key, val in self.config.items():
189
+ # ignore empty values
190
+ if not val:
191
+ logger.debug(
192
+ "Ignoring configuration key '%s' as it's value is empty.",
193
+ section_key,
194
+ )
195
+ continue
196
+
197
+ section, key = section_key.split(".", 1)
198
+ if section in override_order:
199
+ section_items[section].append((key, val))
200
+
201
+ # Yield each group in their override order
202
+ for section in override_order:
203
+ for key, val in section_items[section]:
204
+ yield key, val
205
+
206
+ def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
207
+ """Updates the given defaults with values from the config files and
208
+ the environ. Does a little special handling for certain types of
209
+ options (lists)."""
210
+
211
+ # Accumulate complex default state.
212
+ self.values = optparse.Values(self.defaults)
213
+ late_eval = set()
214
+ # Then set the options with those values
215
+ for key, val in self._get_ordered_configuration_items():
216
+ # '--' because configuration supports only long names
217
+ option = self.get_option("--" + key)
218
+
219
+ # Ignore options not present in this parser. E.g. non-globals put
220
+ # in [global] by users that want them to apply to all applicable
221
+ # commands.
222
+ if option is None:
223
+ continue
224
+
225
+ assert option.dest is not None
226
+
227
+ if option.action in ("store_true", "store_false"):
228
+ try:
229
+ val = strtobool(val)
230
+ except ValueError:
231
+ self.error(
232
+ f"{val} is not a valid value for {key} option, "
233
+ "please specify a boolean value like yes/no, "
234
+ "true/false or 1/0 instead."
235
+ )
236
+ elif option.action == "count":
237
+ with suppress(ValueError):
238
+ val = strtobool(val)
239
+ with suppress(ValueError):
240
+ val = int(val)
241
+ if not isinstance(val, int) or val < 0:
242
+ self.error(
243
+ f"{val} is not a valid value for {key} option, "
244
+ "please instead specify either a non-negative integer "
245
+ "or a boolean value like yes/no or false/true "
246
+ "which is equivalent to 1/0."
247
+ )
248
+ elif option.action == "append":
249
+ val = val.split()
250
+ val = [self.check_default(option, key, v) for v in val]
251
+ elif option.action == "callback":
252
+ assert option.callback is not None
253
+ late_eval.add(option.dest)
254
+ opt_str = option.get_opt_string()
255
+ val = option.convert_value(opt_str, val)
256
+ # From take_action
257
+ args = option.callback_args or ()
258
+ kwargs = option.callback_kwargs or {}
259
+ option.callback(option, opt_str, val, self, *args, **kwargs)
260
+ else:
261
+ val = self.check_default(option, key, val)
262
+
263
+ defaults[option.dest] = val
264
+
265
+ for key in late_eval:
266
+ defaults[key] = getattr(self.values, key)
267
+ self.values = None
268
+ return defaults
269
+
270
+ def get_default_values(self) -> optparse.Values:
271
+ """Overriding to make updating the defaults after instantiation of
272
+ the option parser possible, _update_defaults() does the dirty work."""
273
+ if not self.process_default_values:
274
+ # Old, pre-Optik 1.5 behaviour.
275
+ return optparse.Values(self.defaults)
276
+
277
+ # Load the configuration, or error out in case of an error
278
+ try:
279
+ self.config.load()
280
+ except ConfigurationError as err:
281
+ self.exit(UNKNOWN_ERROR, str(err))
282
+
283
+ defaults = self._update_defaults(self.defaults.copy()) # ours
284
+ for option in self._get_all_options():
285
+ assert option.dest is not None
286
+ default = defaults.get(option.dest)
287
+ if isinstance(default, str):
288
+ opt_str = option.get_opt_string()
289
+ defaults[option.dest] = option.check_value(opt_str, default)
290
+ return optparse.Values(defaults)
291
+
292
+ def error(self, msg: str) -> None:
293
+ self.print_usage(sys.stderr)
294
+ self.exit(UNKNOWN_ERROR, f"{msg}\n")
.venv/lib/python3.11/site-packages/pip/_internal/cli/progress_bars.py ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
3
+
4
+ from pip._vendor.rich.progress import (
5
+ BarColumn,
6
+ DownloadColumn,
7
+ FileSizeColumn,
8
+ Progress,
9
+ ProgressColumn,
10
+ SpinnerColumn,
11
+ TextColumn,
12
+ TimeElapsedColumn,
13
+ TimeRemainingColumn,
14
+ TransferSpeedColumn,
15
+ )
16
+
17
+ from pip._internal.utils.logging import get_indentation
18
+
19
+ DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
20
+
21
+
22
+ def _rich_progress_bar(
23
+ iterable: Iterable[bytes],
24
+ *,
25
+ bar_type: str,
26
+ size: int,
27
+ ) -> Generator[bytes, None, None]:
28
+ assert bar_type == "on", "This should only be used in the default mode."
29
+
30
+ if not size:
31
+ total = float("inf")
32
+ columns: Tuple[ProgressColumn, ...] = (
33
+ TextColumn("[progress.description]{task.description}"),
34
+ SpinnerColumn("line", speed=1.5),
35
+ FileSizeColumn(),
36
+ TransferSpeedColumn(),
37
+ TimeElapsedColumn(),
38
+ )
39
+ else:
40
+ total = size
41
+ columns = (
42
+ TextColumn("[progress.description]{task.description}"),
43
+ BarColumn(),
44
+ DownloadColumn(),
45
+ TransferSpeedColumn(),
46
+ TextColumn("eta"),
47
+ TimeRemainingColumn(),
48
+ )
49
+
50
+ progress = Progress(*columns, refresh_per_second=30)
51
+ task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
52
+ with progress:
53
+ for chunk in iterable:
54
+ yield chunk
55
+ progress.update(task_id, advance=len(chunk))
56
+
57
+
58
+ def get_download_progress_renderer(
59
+ *, bar_type: str, size: Optional[int] = None
60
+ ) -> DownloadProgressRenderer:
61
+ """Get an object that can be used to render the download progress.
62
+
63
+ Returns a callable, that takes an iterable to "wrap".
64
+ """
65
+ if bar_type == "on":
66
+ return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
67
+ else:
68
+ return iter # no-op, when passed an iterator
.venv/lib/python3.11/site-packages/pip/_internal/cli/req_command.py ADDED
@@ -0,0 +1,505 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Contains the Command base classes that depend on PipSession.
2
+
3
+ The classes in this module are in a separate module so the commands not
4
+ needing download / PackageFinder capability don't unnecessarily import the
5
+ PackageFinder machinery and all its vendored dependencies, etc.
6
+ """
7
+
8
+ import logging
9
+ import os
10
+ import sys
11
+ from functools import partial
12
+ from optparse import Values
13
+ from typing import TYPE_CHECKING, Any, List, Optional, Tuple
14
+
15
+ from pip._internal.cache import WheelCache
16
+ from pip._internal.cli import cmdoptions
17
+ from pip._internal.cli.base_command import Command
18
+ from pip._internal.cli.command_context import CommandContextMixIn
19
+ from pip._internal.exceptions import CommandError, PreviousBuildDirError
20
+ from pip._internal.index.collector import LinkCollector
21
+ from pip._internal.index.package_finder import PackageFinder
22
+ from pip._internal.models.selection_prefs import SelectionPreferences
23
+ from pip._internal.models.target_python import TargetPython
24
+ from pip._internal.network.session import PipSession
25
+ from pip._internal.operations.build.build_tracker import BuildTracker
26
+ from pip._internal.operations.prepare import RequirementPreparer
27
+ from pip._internal.req.constructors import (
28
+ install_req_from_editable,
29
+ install_req_from_line,
30
+ install_req_from_parsed_requirement,
31
+ install_req_from_req_string,
32
+ )
33
+ from pip._internal.req.req_file import parse_requirements
34
+ from pip._internal.req.req_install import InstallRequirement
35
+ from pip._internal.resolution.base import BaseResolver
36
+ from pip._internal.self_outdated_check import pip_self_version_check
37
+ from pip._internal.utils.temp_dir import (
38
+ TempDirectory,
39
+ TempDirectoryTypeRegistry,
40
+ tempdir_kinds,
41
+ )
42
+ from pip._internal.utils.virtualenv import running_under_virtualenv
43
+
44
+ if TYPE_CHECKING:
45
+ from ssl import SSLContext
46
+
47
+ logger = logging.getLogger(__name__)
48
+
49
+
50
+ def _create_truststore_ssl_context() -> Optional["SSLContext"]:
51
+ if sys.version_info < (3, 10):
52
+ raise CommandError("The truststore feature is only available for Python 3.10+")
53
+
54
+ try:
55
+ import ssl
56
+ except ImportError:
57
+ logger.warning("Disabling truststore since ssl support is missing")
58
+ return None
59
+
60
+ try:
61
+ from pip._vendor import truststore
62
+ except ImportError as e:
63
+ raise CommandError(f"The truststore feature is unavailable: {e}")
64
+
65
+ return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
66
+
67
+
68
+ class SessionCommandMixin(CommandContextMixIn):
69
+
70
+ """
71
+ A class mixin for command classes needing _build_session().
72
+ """
73
+
74
+ def __init__(self) -> None:
75
+ super().__init__()
76
+ self._session: Optional[PipSession] = None
77
+
78
+ @classmethod
79
+ def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
80
+ """Return a list of index urls from user-provided options."""
81
+ index_urls = []
82
+ if not getattr(options, "no_index", False):
83
+ url = getattr(options, "index_url", None)
84
+ if url:
85
+ index_urls.append(url)
86
+ urls = getattr(options, "extra_index_urls", None)
87
+ if urls:
88
+ index_urls.extend(urls)
89
+ # Return None rather than an empty list
90
+ return index_urls or None
91
+
92
+ def get_default_session(self, options: Values) -> PipSession:
93
+ """Get a default-managed session."""
94
+ if self._session is None:
95
+ self._session = self.enter_context(self._build_session(options))
96
+ # there's no type annotation on requests.Session, so it's
97
+ # automatically ContextManager[Any] and self._session becomes Any,
98
+ # then https://github.com/python/mypy/issues/7696 kicks in
99
+ assert self._session is not None
100
+ return self._session
101
+
102
+ def _build_session(
103
+ self,
104
+ options: Values,
105
+ retries: Optional[int] = None,
106
+ timeout: Optional[int] = None,
107
+ fallback_to_certifi: bool = False,
108
+ ) -> PipSession:
109
+ cache_dir = options.cache_dir
110
+ assert not cache_dir or os.path.isabs(cache_dir)
111
+
112
+ if "truststore" in options.features_enabled:
113
+ try:
114
+ ssl_context = _create_truststore_ssl_context()
115
+ except Exception:
116
+ if not fallback_to_certifi:
117
+ raise
118
+ ssl_context = None
119
+ else:
120
+ ssl_context = None
121
+
122
+ session = PipSession(
123
+ cache=os.path.join(cache_dir, "http-v2") if cache_dir else None,
124
+ retries=retries if retries is not None else options.retries,
125
+ trusted_hosts=options.trusted_hosts,
126
+ index_urls=self._get_index_urls(options),
127
+ ssl_context=ssl_context,
128
+ )
129
+
130
+ # Handle custom ca-bundles from the user
131
+ if options.cert:
132
+ session.verify = options.cert
133
+
134
+ # Handle SSL client certificate
135
+ if options.client_cert:
136
+ session.cert = options.client_cert
137
+
138
+ # Handle timeouts
139
+ if options.timeout or timeout:
140
+ session.timeout = timeout if timeout is not None else options.timeout
141
+
142
+ # Handle configured proxies
143
+ if options.proxy:
144
+ session.proxies = {
145
+ "http": options.proxy,
146
+ "https": options.proxy,
147
+ }
148
+
149
+ # Determine if we can prompt the user for authentication or not
150
+ session.auth.prompting = not options.no_input
151
+ session.auth.keyring_provider = options.keyring_provider
152
+
153
+ return session
154
+
155
+
156
+ class IndexGroupCommand(Command, SessionCommandMixin):
157
+
158
+ """
159
+ Abstract base class for commands with the index_group options.
160
+
161
+ This also corresponds to the commands that permit the pip version check.
162
+ """
163
+
164
+ def handle_pip_version_check(self, options: Values) -> None:
165
+ """
166
+ Do the pip version check if not disabled.
167
+
168
+ This overrides the default behavior of not doing the check.
169
+ """
170
+ # Make sure the index_group options are present.
171
+ assert hasattr(options, "no_index")
172
+
173
+ if options.disable_pip_version_check or options.no_index:
174
+ return
175
+
176
+ # Otherwise, check if we're using the latest version of pip available.
177
+ session = self._build_session(
178
+ options,
179
+ retries=0,
180
+ timeout=min(5, options.timeout),
181
+ # This is set to ensure the function does not fail when truststore is
182
+ # specified in use-feature but cannot be loaded. This usually raises a
183
+ # CommandError and shows a nice user-facing error, but this function is not
184
+ # called in that try-except block.
185
+ fallback_to_certifi=True,
186
+ )
187
+ with session:
188
+ pip_self_version_check(session, options)
189
+
190
+
191
+ KEEPABLE_TEMPDIR_TYPES = [
192
+ tempdir_kinds.BUILD_ENV,
193
+ tempdir_kinds.EPHEM_WHEEL_CACHE,
194
+ tempdir_kinds.REQ_BUILD,
195
+ ]
196
+
197
+
198
+ def warn_if_run_as_root() -> None:
199
+ """Output a warning for sudo users on Unix.
200
+
201
+ In a virtual environment, sudo pip still writes to virtualenv.
202
+ On Windows, users may run pip as Administrator without issues.
203
+ This warning only applies to Unix root users outside of virtualenv.
204
+ """
205
+ if running_under_virtualenv():
206
+ return
207
+ if not hasattr(os, "getuid"):
208
+ return
209
+ # On Windows, there are no "system managed" Python packages. Installing as
210
+ # Administrator via pip is the correct way of updating system environments.
211
+ #
212
+ # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
213
+ # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
214
+ if sys.platform == "win32" or sys.platform == "cygwin":
215
+ return
216
+
217
+ if os.getuid() != 0:
218
+ return
219
+
220
+ logger.warning(
221
+ "Running pip as the 'root' user can result in broken permissions and "
222
+ "conflicting behaviour with the system package manager. "
223
+ "It is recommended to use a virtual environment instead: "
224
+ "https://pip.pypa.io/warnings/venv"
225
+ )
226
+
227
+
228
+ def with_cleanup(func: Any) -> Any:
229
+ """Decorator for common logic related to managing temporary
230
+ directories.
231
+ """
232
+
233
+ def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
234
+ for t in KEEPABLE_TEMPDIR_TYPES:
235
+ registry.set_delete(t, False)
236
+
237
+ def wrapper(
238
+ self: RequirementCommand, options: Values, args: List[Any]
239
+ ) -> Optional[int]:
240
+ assert self.tempdir_registry is not None
241
+ if options.no_clean:
242
+ configure_tempdir_registry(self.tempdir_registry)
243
+
244
+ try:
245
+ return func(self, options, args)
246
+ except PreviousBuildDirError:
247
+ # This kind of conflict can occur when the user passes an explicit
248
+ # build directory with a pre-existing folder. In that case we do
249
+ # not want to accidentally remove it.
250
+ configure_tempdir_registry(self.tempdir_registry)
251
+ raise
252
+
253
+ return wrapper
254
+
255
+
256
+ class RequirementCommand(IndexGroupCommand):
257
+ def __init__(self, *args: Any, **kw: Any) -> None:
258
+ super().__init__(*args, **kw)
259
+
260
+ self.cmd_opts.add_option(cmdoptions.no_clean())
261
+
262
+ @staticmethod
263
+ def determine_resolver_variant(options: Values) -> str:
264
+ """Determines which resolver should be used, based on the given options."""
265
+ if "legacy-resolver" in options.deprecated_features_enabled:
266
+ return "legacy"
267
+
268
+ return "resolvelib"
269
+
270
+ @classmethod
271
+ def make_requirement_preparer(
272
+ cls,
273
+ temp_build_dir: TempDirectory,
274
+ options: Values,
275
+ build_tracker: BuildTracker,
276
+ session: PipSession,
277
+ finder: PackageFinder,
278
+ use_user_site: bool,
279
+ download_dir: Optional[str] = None,
280
+ verbosity: int = 0,
281
+ ) -> RequirementPreparer:
282
+ """
283
+ Create a RequirementPreparer instance for the given parameters.
284
+ """
285
+ temp_build_dir_path = temp_build_dir.path
286
+ assert temp_build_dir_path is not None
287
+ legacy_resolver = False
288
+
289
+ resolver_variant = cls.determine_resolver_variant(options)
290
+ if resolver_variant == "resolvelib":
291
+ lazy_wheel = "fast-deps" in options.features_enabled
292
+ if lazy_wheel:
293
+ logger.warning(
294
+ "pip is using lazily downloaded wheels using HTTP "
295
+ "range requests to obtain dependency information. "
296
+ "This experimental feature is enabled through "
297
+ "--use-feature=fast-deps and it is not ready for "
298
+ "production."
299
+ )
300
+ else:
301
+ legacy_resolver = True
302
+ lazy_wheel = False
303
+ if "fast-deps" in options.features_enabled:
304
+ logger.warning(
305
+ "fast-deps has no effect when used with the legacy resolver."
306
+ )
307
+
308
+ return RequirementPreparer(
309
+ build_dir=temp_build_dir_path,
310
+ src_dir=options.src_dir,
311
+ download_dir=download_dir,
312
+ build_isolation=options.build_isolation,
313
+ check_build_deps=options.check_build_deps,
314
+ build_tracker=build_tracker,
315
+ session=session,
316
+ progress_bar=options.progress_bar,
317
+ finder=finder,
318
+ require_hashes=options.require_hashes,
319
+ use_user_site=use_user_site,
320
+ lazy_wheel=lazy_wheel,
321
+ verbosity=verbosity,
322
+ legacy_resolver=legacy_resolver,
323
+ )
324
+
325
+ @classmethod
326
+ def make_resolver(
327
+ cls,
328
+ preparer: RequirementPreparer,
329
+ finder: PackageFinder,
330
+ options: Values,
331
+ wheel_cache: Optional[WheelCache] = None,
332
+ use_user_site: bool = False,
333
+ ignore_installed: bool = True,
334
+ ignore_requires_python: bool = False,
335
+ force_reinstall: bool = False,
336
+ upgrade_strategy: str = "to-satisfy-only",
337
+ use_pep517: Optional[bool] = None,
338
+ py_version_info: Optional[Tuple[int, ...]] = None,
339
+ ) -> BaseResolver:
340
+ """
341
+ Create a Resolver instance for the given parameters.
342
+ """
343
+ make_install_req = partial(
344
+ install_req_from_req_string,
345
+ isolated=options.isolated_mode,
346
+ use_pep517=use_pep517,
347
+ )
348
+ resolver_variant = cls.determine_resolver_variant(options)
349
+ # The long import name and duplicated invocation is needed to convince
350
+ # Mypy into correctly typechecking. Otherwise it would complain the
351
+ # "Resolver" class being redefined.
352
+ if resolver_variant == "resolvelib":
353
+ import pip._internal.resolution.resolvelib.resolver
354
+
355
+ return pip._internal.resolution.resolvelib.resolver.Resolver(
356
+ preparer=preparer,
357
+ finder=finder,
358
+ wheel_cache=wheel_cache,
359
+ make_install_req=make_install_req,
360
+ use_user_site=use_user_site,
361
+ ignore_dependencies=options.ignore_dependencies,
362
+ ignore_installed=ignore_installed,
363
+ ignore_requires_python=ignore_requires_python,
364
+ force_reinstall=force_reinstall,
365
+ upgrade_strategy=upgrade_strategy,
366
+ py_version_info=py_version_info,
367
+ )
368
+ import pip._internal.resolution.legacy.resolver
369
+
370
+ return pip._internal.resolution.legacy.resolver.Resolver(
371
+ preparer=preparer,
372
+ finder=finder,
373
+ wheel_cache=wheel_cache,
374
+ make_install_req=make_install_req,
375
+ use_user_site=use_user_site,
376
+ ignore_dependencies=options.ignore_dependencies,
377
+ ignore_installed=ignore_installed,
378
+ ignore_requires_python=ignore_requires_python,
379
+ force_reinstall=force_reinstall,
380
+ upgrade_strategy=upgrade_strategy,
381
+ py_version_info=py_version_info,
382
+ )
383
+
384
+ def get_requirements(
385
+ self,
386
+ args: List[str],
387
+ options: Values,
388
+ finder: PackageFinder,
389
+ session: PipSession,
390
+ ) -> List[InstallRequirement]:
391
+ """
392
+ Parse command-line arguments into the corresponding requirements.
393
+ """
394
+ requirements: List[InstallRequirement] = []
395
+ for filename in options.constraints:
396
+ for parsed_req in parse_requirements(
397
+ filename,
398
+ constraint=True,
399
+ finder=finder,
400
+ options=options,
401
+ session=session,
402
+ ):
403
+ req_to_add = install_req_from_parsed_requirement(
404
+ parsed_req,
405
+ isolated=options.isolated_mode,
406
+ user_supplied=False,
407
+ )
408
+ requirements.append(req_to_add)
409
+
410
+ for req in args:
411
+ req_to_add = install_req_from_line(
412
+ req,
413
+ comes_from=None,
414
+ isolated=options.isolated_mode,
415
+ use_pep517=options.use_pep517,
416
+ user_supplied=True,
417
+ config_settings=getattr(options, "config_settings", None),
418
+ )
419
+ requirements.append(req_to_add)
420
+
421
+ for req in options.editables:
422
+ req_to_add = install_req_from_editable(
423
+ req,
424
+ user_supplied=True,
425
+ isolated=options.isolated_mode,
426
+ use_pep517=options.use_pep517,
427
+ config_settings=getattr(options, "config_settings", None),
428
+ )
429
+ requirements.append(req_to_add)
430
+
431
+ # NOTE: options.require_hashes may be set if --require-hashes is True
432
+ for filename in options.requirements:
433
+ for parsed_req in parse_requirements(
434
+ filename, finder=finder, options=options, session=session
435
+ ):
436
+ req_to_add = install_req_from_parsed_requirement(
437
+ parsed_req,
438
+ isolated=options.isolated_mode,
439
+ use_pep517=options.use_pep517,
440
+ user_supplied=True,
441
+ config_settings=parsed_req.options.get("config_settings")
442
+ if parsed_req.options
443
+ else None,
444
+ )
445
+ requirements.append(req_to_add)
446
+
447
+ # If any requirement has hash options, enable hash checking.
448
+ if any(req.has_hash_options for req in requirements):
449
+ options.require_hashes = True
450
+
451
+ if not (args or options.editables or options.requirements):
452
+ opts = {"name": self.name}
453
+ if options.find_links:
454
+ raise CommandError(
455
+ "You must give at least one requirement to {name} "
456
+ '(maybe you meant "pip {name} {links}"?)'.format(
457
+ **dict(opts, links=" ".join(options.find_links))
458
+ )
459
+ )
460
+ else:
461
+ raise CommandError(
462
+ "You must give at least one requirement to {name} "
463
+ '(see "pip help {name}")'.format(**opts)
464
+ )
465
+
466
+ return requirements
467
+
468
+ @staticmethod
469
+ def trace_basic_info(finder: PackageFinder) -> None:
470
+ """
471
+ Trace basic information about the provided objects.
472
+ """
473
+ # Display where finder is looking for packages
474
+ search_scope = finder.search_scope
475
+ locations = search_scope.get_formatted_locations()
476
+ if locations:
477
+ logger.info(locations)
478
+
479
+ def _build_package_finder(
480
+ self,
481
+ options: Values,
482
+ session: PipSession,
483
+ target_python: Optional[TargetPython] = None,
484
+ ignore_requires_python: Optional[bool] = None,
485
+ ) -> PackageFinder:
486
+ """
487
+ Create a package finder appropriate to this requirement command.
488
+
489
+ :param ignore_requires_python: Whether to ignore incompatible
490
+ "Requires-Python" values in links. Defaults to False.
491
+ """
492
+ link_collector = LinkCollector.create(session, options=options)
493
+ selection_prefs = SelectionPreferences(
494
+ allow_yanked=True,
495
+ format_control=options.format_control,
496
+ allow_all_prereleases=options.pre,
497
+ prefer_binary=options.prefer_binary,
498
+ ignore_requires_python=ignore_requires_python,
499
+ )
500
+
501
+ return PackageFinder.create(
502
+ link_collector=link_collector,
503
+ selection_prefs=selection_prefs,
504
+ target_python=target_python,
505
+ )
.venv/lib/python3.11/site-packages/pip/_internal/cli/spinners.py ADDED
@@ -0,0 +1,159 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+ import itertools
3
+ import logging
4
+ import sys
5
+ import time
6
+ from typing import IO, Generator, Optional
7
+
8
+ from pip._internal.utils.compat import WINDOWS
9
+ from pip._internal.utils.logging import get_indentation
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
class SpinnerInterface:
    """Abstract interface shared by the spinner implementations below."""

    def spin(self) -> None:
        # Advance the spinner display by one "tick".
        raise NotImplementedError()

    def finish(self, final_status: str) -> None:
        # Stop spinning and display ``final_status``.
        raise NotImplementedError()
20
+
21
+
22
class InteractiveSpinner(SpinnerInterface):
    """Spinner for interactive terminals.

    Writes a rotating glyph directly to the output stream (stdout by
    default) and erases it in place between updates; updates are
    rate-limited.
    """

    def __init__(
        self,
        message: str,
        file: Optional[IO[str]] = None,
        spin_chars: str = "-\\|/",
        # Empirically, 8 updates/second looks nice
        min_update_interval_seconds: float = 0.125,
    ):
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        # Endless iterator over the spinner glyphs.
        self._spin_cycle = itertools.cycle(spin_chars)

        # Write the message prefix once; later updates only rewrite the
        # status text that follows "... ".
        self._file.write(" " * get_indentation() + self._message + " ... ")
        # Width (in characters) of the currently displayed status.
        self._width = 0

    def _write(self, status: str) -> None:
        """Replace the currently displayed status with ``status``."""
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self) -> None:
        """Show the next glyph; no-op when finished or rate-limited."""
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status: str) -> None:
        """Replace the spinner with ``final_status`` and end the line."""
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
69
+
70
+
71
# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(SpinnerInterface):
    """Spinner that reports progress through the logging system rather
    than redrawing the terminal."""

    def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status: str) -> None:
        """Emit one log record for the current status."""
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self) -> None:
        if self._finished or not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status: str) -> None:
        if self._finished:
            return
        self._update(f"finished with status '{final_status}'")
        self._finished = True
99
+
100
+
101
class RateLimiter:
    """Tracks whether enough wall-clock time has elapsed since the last
    recorded update."""

    def __init__(self, min_update_interval_seconds: float) -> None:
        self._min_update_interval_seconds = min_update_interval_seconds
        # Epoch of the last update; 0 means "never", so the first call to
        # ready() always succeeds.
        self._last_update: float = 0

    def ready(self) -> bool:
        """Return True when at least the minimum interval has passed."""
        elapsed = time.time() - self._last_update
        return elapsed >= self._min_update_interval_seconds

    def reset(self) -> None:
        """Record "now" as the moment of the last update."""
        self._last_update = time.time()
113
+
114
+
115
@contextlib.contextmanager
def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
    """Yield a spinner labelled ``message``, finishing it on exit.

    The final status is "done" on normal exit, "canceled" on
    KeyboardInterrupt, and "error" on any other exception (which is
    re-raised in both cases).
    """
    # Interactive spinner goes directly to sys.stdout rather than being routed
    # through the logging system, but it acts like it has level INFO,
    # i.e. it's only displayed if we're at level INFO or better.
    # Non-interactive spinner goes through the logging system, so it is always
    # in sync with logging configuration.
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner: SpinnerInterface = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
137
+
138
+
139
# ANSI escape sequences to hide and show the terminal cursor.
HIDE_CURSOR = "\x1b[?25l"
SHOW_CURSOR = "\x1b[?25h"


@contextlib.contextmanager
def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
    """Hide the terminal cursor while the managed block runs, when supported."""
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama. So don't even try.
    if WINDOWS:
        yield
    # We don't want to clutter the output with control characters if we're
    # writing to a file, or if the user is running with --quiet.
    # See https://github.com/pypa/pip/issues/3418
    elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            # Always restore the cursor, even if the body raised.
            file.write(SHOW_CURSOR)
.venv/lib/python3.11/site-packages/pip/_internal/cli/status_codes.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
# Process exit codes used by pip commands.
SUCCESS = 0
# A generic failure.
ERROR = 1
# An unexpected / unhandled failure.
UNKNOWN_ERROR = 2
# NOTE(review): the remaining codes are command-specific; the names
# describe the condition each one reports.
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23
.venv/lib/python3.11/site-packages/pip/_internal/distributions/__init__.py ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pip._internal.distributions.base import AbstractDistribution
2
+ from pip._internal.distributions.sdist import SourceDistribution
3
+ from pip._internal.distributions.wheel import WheelDistribution
4
+ from pip._internal.req.req_install import InstallRequirement
5
+
6
+
7
def make_distribution_for_install_requirement(
    install_req: InstallRequirement,
) -> AbstractDistribution:
    """Returns a Distribution for the given InstallRequirement"""
    # Editable requirements always go through the source-distribution path;
    # they use the legacy logic until a modern standard exists for them.
    # Only a non-editable wheel can be handled as a wheel distribution;
    # anything else falls back to a source distribution.
    if not install_req.editable and install_req.is_wheel:
        return WheelDistribution(install_req)
    return SourceDistribution(install_req)
.venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (1.03 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc ADDED
Binary file (3.13 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc ADDED
Binary file (1.84 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc ADDED
Binary file (9.37 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc ADDED
Binary file (2.43 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/distributions/base.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import abc
2
+ from typing import Optional
3
+
4
+ from pip._internal.index.package_finder import PackageFinder
5
+ from pip._internal.metadata.base import BaseDistribution
6
+ from pip._internal.req import InstallRequirement
7
+
8
+
9
class AbstractDistribution(metaclass=abc.ABCMeta):
    """A base class for handling installable artifacts.

    The requirements for anything installable are as follows:

    - we must be able to determine the requirement name
      (or we can't correctly handle the non-upgrade case).

    - for packages with setup requirements, we must also be able
      to determine their requirements without installing additional
      packages (for the same reason as run-time dependencies)

    - we must be able to create a Distribution object exposing the
      above metadata.

    - if we need to do work in the build tracker, we must be able to generate a unique
      string to identify the requirement in the build tracker.
    """

    def __init__(self, req: InstallRequirement) -> None:
        super().__init__()
        # The install requirement this distribution object wraps.
        self.req = req

    # ``abc.abstractproperty`` is deprecated (since Python 3.3); stacking
    # ``@property`` over ``@abc.abstractmethod`` is the documented
    # replacement and behaves identically for subclasses.
    @property
    @abc.abstractmethod
    def build_tracker_id(self) -> Optional[str]:
        """A string that uniquely identifies this requirement to the build tracker.

        If None, then this dist has no work to do in the build tracker, and
        ``.prepare_distribution_metadata()`` will not be called."""
        raise NotImplementedError()

    @abc.abstractmethod
    def get_metadata_distribution(self) -> BaseDistribution:
        """Return a metadata-bearing distribution for this artifact."""
        raise NotImplementedError()

    @abc.abstractmethod
    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        """Do whatever work is needed so that metadata becomes available."""
        raise NotImplementedError()
.venv/lib/python3.11/site-packages/pip/_internal/distributions/installed.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional
2
+
3
+ from pip._internal.distributions.base import AbstractDistribution
4
+ from pip._internal.index.package_finder import PackageFinder
5
+ from pip._internal.metadata import BaseDistribution
6
+
7
+
8
class InstalledDistribution(AbstractDistribution):
    """Represents an installed package.

    This does not need any preparation as the required information has already
    been computed.
    """

    @property
    def build_tracker_id(self) -> Optional[str]:
        # Nothing to build, so there is no build-tracker work to do.
        return None

    def get_metadata_distribution(self) -> BaseDistribution:
        dist = self.req.satisfied_by
        assert dist is not None, "not actually installed"
        return dist

    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        # Already installed: metadata is available without preparation.
        pass
.venv/lib/python3.11/site-packages/pip/_internal/distributions/sdist.py ADDED
@@ -0,0 +1,156 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from typing import Iterable, Optional, Set, Tuple
3
+
4
+ from pip._internal.build_env import BuildEnvironment
5
+ from pip._internal.distributions.base import AbstractDistribution
6
+ from pip._internal.exceptions import InstallationError
7
+ from pip._internal.index.package_finder import PackageFinder
8
+ from pip._internal.metadata import BaseDistribution
9
+ from pip._internal.utils.subprocess import runner_with_spinner_message
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
class SourceDistribution(AbstractDistribution):
    """Represents a source distribution.

    The preparation step for these needs metadata for the packages to be
    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
    """

    @property
    def build_tracker_id(self) -> Optional[str]:
        """Identify this requirement uniquely by its link."""
        assert self.req.link
        return self.req.link.url_without_fragment

    def get_metadata_distribution(self) -> BaseDistribution:
        """Return the distribution produced by metadata preparation."""
        return self.req.get_dist()

    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        """Generate metadata for this sdist, setting up build isolation and
        installing build dependencies as required."""
        # Load pyproject.toml, to determine whether PEP 517 is to be used
        self.req.load_pyproject_toml()

        # Set up the build isolation, if this requirement should be isolated
        should_isolate = self.req.use_pep517 and build_isolation
        if should_isolate:
            # Setup an isolated environment and install the build backend static
            # requirements in it.
            self._prepare_build_backend(finder)
            # Check that if the requirement is editable, it either supports PEP 660 or
            # has a setup.py or a setup.cfg. This cannot be done earlier because we need
            # to setup the build backend to verify it supports build_editable, nor can
            # it be done later, because we want to avoid installing build requirements
            # needlessly. Doing it here also works around setuptools generating
            # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
            # without setup.py nor setup.cfg.
            self.req.isolated_editable_sanity_check()
            # Install the dynamic build requirements.
            self._install_build_reqs(finder)
        # Check if the current environment provides build dependencies
        should_check_deps = self.req.use_pep517 and check_build_deps
        if should_check_deps:
            pyproject_requires = self.req.pyproject_requires
            assert pyproject_requires is not None
            conflicting, missing = self.req.build_env.check_requirements(
                pyproject_requires
            )
            if conflicting:
                self._raise_conflicts("the backend dependencies", conflicting)
            if missing:
                self._raise_missing_reqs(missing)
        self.req.prepare_metadata()

    def _prepare_build_backend(self, finder: PackageFinder) -> None:
        """Create the build environment and install the static (pyproject)
        build requirements into it, warning on problems."""
        # Isolate in a BuildEnvironment and install the build-time
        # requirements.
        pyproject_requires = self.req.pyproject_requires
        assert pyproject_requires is not None

        self.req.build_env = BuildEnvironment()
        self.req.build_env.install_requirements(
            finder, pyproject_requires, "overlay", kind="build dependencies"
        )
        conflicting, missing = self.req.build_env.check_requirements(
            self.req.requirements_to_check
        )
        if conflicting:
            self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
        if missing:
            logger.warning(
                "Missing build requirements in pyproject.toml for %s.",
                self.req,
            )
            logger.warning(
                "The project does not specify a build backend, and "
                "pip cannot fall back to setuptools without %s.",
                " and ".join(map(repr, sorted(missing))),
            )

    def _get_build_requires_wheel(self) -> Iterable[str]:
        """Ask the PEP 517 backend for extra wheel-build requirements."""
        with self.req.build_env:
            runner = runner_with_spinner_message("Getting requirements to build wheel")
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(runner):
                return backend.get_requires_for_build_wheel()

    def _get_build_requires_editable(self) -> Iterable[str]:
        """Ask the PEP 517 backend for extra editable-build requirements."""
        with self.req.build_env:
            runner = runner_with_spinner_message(
                "Getting requirements to build editable"
            )
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(runner):
                return backend.get_requires_for_build_editable()

    def _install_build_reqs(self, finder: PackageFinder) -> None:
        """Install the dynamic build requirements reported by the backend."""
        # Install any extra build dependencies that the backend requests.
        # This must be done in a second pass, as the pyproject.toml
        # dependencies must be installed before we can call the backend.
        if (
            self.req.editable
            and self.req.permit_editable_wheels
            and self.req.supports_pyproject_editable()
        ):
            build_reqs = self._get_build_requires_editable()
        else:
            build_reqs = self._get_build_requires_wheel()
        conflicting, missing = self.req.build_env.check_requirements(build_reqs)
        if conflicting:
            self._raise_conflicts("the backend dependencies", conflicting)
        self.req.build_env.install_requirements(
            finder, missing, "normal", kind="backend dependencies"
        )

    def _raise_conflicts(
        self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
    ) -> None:
        """Raise InstallationError describing conflicting build dependencies."""
        format_string = (
            "Some build dependencies for {requirement} "
            "conflict with {conflicting_with}: {description}."
        )
        error_message = format_string.format(
            requirement=self.req,
            conflicting_with=conflicting_with,
            description=", ".join(
                f"{installed} is incompatible with {wanted}"
                for installed, wanted in sorted(conflicting_reqs)
            ),
        )
        raise InstallationError(error_message)

    def _raise_missing_reqs(self, missing: Set[str]) -> None:
        """Raise InstallationError listing missing build dependencies."""
        format_string = (
            "Some build dependencies for {requirement} are missing: {missing}."
        )
        error_message = format_string.format(
            requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
        )
        raise InstallationError(error_message)
.venv/lib/python3.11/site-packages/pip/_internal/distributions/wheel.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional
2
+
3
+ from pip._vendor.packaging.utils import canonicalize_name
4
+
5
+ from pip._internal.distributions.base import AbstractDistribution
6
+ from pip._internal.index.package_finder import PackageFinder
7
+ from pip._internal.metadata import (
8
+ BaseDistribution,
9
+ FilesystemWheel,
10
+ get_wheel_distribution,
11
+ )
12
+
13
+
14
class WheelDistribution(AbstractDistribution):
    """Represents a wheel distribution.

    This does not need any preparation as wheels can be directly unpacked.
    """

    @property
    def build_tracker_id(self) -> Optional[str]:
        # Wheels are never built, so there is no build-tracker work to do.
        return None

    def get_metadata_distribution(self) -> BaseDistribution:
        """Loads the metadata from the wheel file into memory and returns a
        Distribution that uses it, not relying on the wheel file or
        requirement.
        """
        local_path = self.req.local_file_path
        assert local_path, "Set as part of preparation during download"
        project_name = self.req.name
        assert project_name, "Wheels are never unnamed"
        wheel = FilesystemWheel(local_path)
        return get_wheel_distribution(wheel, canonicalize_name(project_name))

    def prepare_distribution_metadata(
        self,
        finder: PackageFinder,
        build_isolation: bool,
        check_build_deps: bool,
    ) -> None:
        # Nothing to prepare: the wheel already contains its metadata.
        pass
.venv/lib/python3.11/site-packages/pip/_internal/models/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ """A package that contains models that represent entities.
2
+ """
.venv/lib/python3.11/site-packages/pip/_internal/models/candidate.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pip._vendor.packaging.version import parse as parse_version
2
+
3
+ from pip._internal.models.link import Link
4
+ from pip._internal.utils.models import KeyBasedCompareMixin
5
+
6
+
7
class InstallationCandidate(KeyBasedCompareMixin):
    """Represents a potential "candidate" for installation."""

    __slots__ = ["name", "version", "link"]

    def __init__(self, name: str, version: str, link: Link) -> None:
        self.name = name
        # Parse the raw version string once so candidates can be compared.
        self.version = parse_version(version)
        self.link = link

        # Candidates compare and hash by (name, version, link); comparisons
        # are only defined against other InstallationCandidate instances.
        super().__init__(
            key=(self.name, self.version, self.link),
            defining_class=InstallationCandidate,
        )

    def __repr__(self) -> str:
        return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
            self.name,
            self.version,
            self.link,
        )

    def __str__(self) -> str:
        return f"{self.name!r} candidate (version {self.version} at {self.link})"
.venv/lib/python3.11/site-packages/pip/_internal/models/direct_url.py ADDED
@@ -0,0 +1,235 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ PEP 610 """
2
+ import json
3
+ import re
4
+ import urllib.parse
5
+ from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union
6
+
7
__all__ = [
    "DirectUrl",
    "DirectUrlValidationError",
    "DirInfo",
    "ArchiveInfo",
    "VcsInfo",
]

# Type variable used by the typed dict accessors (_get / _get_required).
T = TypeVar("T")

# Filename of the PEP 610 metadata file inside a *.dist-info directory.
DIRECT_URL_METADATA_NAME = "direct_url.json"
# Matches "${VAR}" or "${USER}:${PASS}" environment-variable placeholders,
# which may stand in for literal credentials in a direct URL.
ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")
19
+
20
+
21
class DirectUrlValidationError(Exception):
    """Raised when a direct-URL structure fails validation."""
23
+
24
+
25
def _get(
    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
) -> Optional[T]:
    """Fetch ``key`` from ``d``, verifying the value has ``expected_type``.

    Returns ``default`` when the key is absent; raises
    DirectUrlValidationError on a type mismatch.
    """
    try:
        value = d[key]
    except KeyError:
        return default
    if not isinstance(value, expected_type):
        raise DirectUrlValidationError(
            f"{value!r} has unexpected type for {key} (expected {expected_type})"
        )
    return value
37
+
38
+
39
def _get_required(
    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
) -> T:
    """Like ``_get``, but raise when the resolved value is None."""
    value = _get(d, expected_type, key, default)
    if value is not None:
        return value
    raise DirectUrlValidationError(f"{key} must have a value")
46
+
47
+
48
+ def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":
49
+ infos = [info for info in infos if info is not None]
50
+ if not infos:
51
+ raise DirectUrlValidationError(
52
+ "missing one of archive_info, dir_info, vcs_info"
53
+ )
54
+ if len(infos) > 1:
55
+ raise DirectUrlValidationError(
56
+ "more than one of archive_info, dir_info, vcs_info"
57
+ )
58
+ assert infos[0] is not None
59
+ return infos[0]
60
+
61
+
62
+ def _filter_none(**kwargs: Any) -> Dict[str, Any]:
63
+ """Make dict excluding None values."""
64
+ return {k: v for k, v in kwargs.items() if v is not None}
65
+
66
+
67
class VcsInfo:
    """``vcs_info`` structure describing a version-control checkout."""

    name = "vcs_info"

    def __init__(
        self,
        vcs: str,
        commit_id: str,
        requested_revision: Optional[str] = None,
    ) -> None:
        self.vcs = vcs
        self.commit_id = commit_id
        self.requested_revision = requested_revision

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
        """Deserialize from a parsed JSON dict; None maps to None."""
        if d is not None:
            return cls(
                vcs=_get_required(d, str, "vcs"),
                commit_id=_get_required(d, str, "commit_id"),
                requested_revision=_get(d, str, "requested_revision"),
            )
        return None

    def _to_dict(self) -> Dict[str, Any]:
        """Serialize, dropping any None-valued fields."""
        return _filter_none(
            vcs=self.vcs,
            requested_revision=self.requested_revision,
            commit_id=self.commit_id,
        )
96
+
97
+
98
class ArchiveInfo:
    """``archive_info`` structure describing a downloaded archive."""

    name = "archive_info"

    def __init__(
        self,
        hash: Optional[str] = None,
        hashes: Optional[Dict[str, str]] = None,
    ) -> None:
        # set hashes before hash, since the hash setter will further populate hashes
        self.hashes = hashes
        self.hash = hash

    @property
    def hash(self) -> Optional[str]:
        """Legacy single "name=value" hash string, if any."""
        return self._hash

    @hash.setter
    def hash(self, value: Optional[str]) -> None:
        if value is not None:
            # Auto-populate the hashes key to upgrade to the new format automatically.
            # We don't back-populate the legacy hash key from hashes.
            try:
                hash_name, digest = value.split("=", 1)
            except ValueError:
                raise DirectUrlValidationError(
                    f"invalid archive_info.hash format: {value!r}"
                )
            if self.hashes is None:
                self.hashes = {hash_name: digest}
            elif hash_name not in self.hashes:
                # Copy before mutating so callers' dicts are never modified.
                self.hashes = self.hashes.copy()
                self.hashes[hash_name] = digest
        self._hash = value

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
        """Deserialize from a parsed JSON dict; None maps to None."""
        if d is not None:
            return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes"))
        return None

    def _to_dict(self) -> Dict[str, Any]:
        """Serialize, dropping any None-valued fields."""
        return _filter_none(hash=self.hash, hashes=self.hashes)
131
+
132
+ @classmethod
133
+ def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
134
+ if d is None:
135
+ return None
136
+ return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes"))
137
+
138
+ def _to_dict(self) -> Dict[str, Any]:
139
+ return _filter_none(hash=self.hash, hashes=self.hashes)
140
+
141
+
142
class DirInfo:
    """``dir_info`` structure for local-directory installs."""

    name = "dir_info"

    def __init__(
        self,
        editable: bool = False,
    ) -> None:
        self.editable = editable

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
        """Deserialize from a parsed JSON dict; None maps to None."""
        if d is not None:
            return cls(editable=_get_required(d, bool, "editable", default=False))
        return None

    def _to_dict(self) -> Dict[str, Any]:
        # Serialize "editable" only when it is True (compact form).
        return _filter_none(editable=self.editable or None)
159
+
160
+
161
# Exactly one of these info structures appears in a direct-URL document.
InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]


class DirectUrl:
    """In-memory representation of a PEP 610 direct_url.json document."""

    def __init__(
        self,
        url: str,
        info: InfoType,
        subdirectory: Optional[str] = None,
    ) -> None:
        self.url = url
        self.info = info
        self.subdirectory = subdirectory

    def _remove_auth_from_netloc(self, netloc: str) -> str:
        """Return ``netloc`` with any ``user:password@`` prefix removed.

        Credentials are preserved when they are the literal ``git`` user of
        a git URL, or when they are ``${VAR}``-style environment-variable
        placeholders (matched by ENV_VAR_RE).
        """
        if "@" not in netloc:
            return netloc
        user_pass, netloc_no_user_pass = netloc.split("@", 1)
        if (
            isinstance(self.info, VcsInfo)
            and self.info.vcs == "git"
            and user_pass == "git"
        ):
            return netloc
        if ENV_VAR_RE.match(user_pass):
            return netloc
        return netloc_no_user_pass

    @property
    def redacted_url(self) -> str:
        """url with user:password part removed unless it is formed with
        environment variables as specified in PEP 610, or it is ``git``
        in the case of a git URL.
        """
        purl = urllib.parse.urlsplit(self.url)
        netloc = self._remove_auth_from_netloc(purl.netloc)
        surl = urllib.parse.urlunsplit(
            (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
        )
        return surl

    def validate(self) -> None:
        # Round-trip through a dict; any structural problem raises
        # DirectUrlValidationError.
        self.from_dict(self.to_dict())

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
        """Build a DirectUrl from a parsed JSON dict, validating its shape."""
        return DirectUrl(
            url=_get_required(d, str, "url"),
            subdirectory=_get(d, str, "subdirectory"),
            info=_exactly_one_of(
                [
                    ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
                    DirInfo._from_dict(_get(d, dict, "dir_info")),
                    VcsInfo._from_dict(_get(d, dict, "vcs_info")),
                ]
            ),
        )

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict (the URL is redacted)."""
        res = _filter_none(
            url=self.redacted_url,
            subdirectory=self.subdirectory,
        )
        res[self.info.name] = self.info._to_dict()
        return res

    @classmethod
    def from_json(cls, s: str) -> "DirectUrl":
        return cls.from_dict(json.loads(s))

    def to_json(self) -> str:
        return json.dumps(self.to_dict(), sort_keys=True)

    def is_local_editable(self) -> bool:
        """True when this describes an editable local-directory install."""
        return isinstance(self.info, DirInfo) and self.info.editable
.venv/lib/python3.11/site-packages/pip/_internal/models/format_control.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import FrozenSet, Optional, Set
2
+
3
+ from pip._vendor.packaging.utils import canonicalize_name
4
+
5
+ from pip._internal.exceptions import CommandError
6
+
7
+
8
class FormatControl:
    """Helper for managing formats from which a package can be installed."""

    __slots__ = ["no_binary", "only_binary"]

    def __init__(
        self,
        no_binary: Optional[Set[str]] = None,
        only_binary: Optional[Set[str]] = None,
    ) -> None:
        if no_binary is None:
            no_binary = set()
        if only_binary is None:
            only_binary = set()

        # Package names (or ":all:") for which binaries are disallowed.
        self.no_binary = no_binary
        # Package names (or ":all:") for which only binaries are allowed.
        self.only_binary = only_binary

    def __eq__(self, other: object) -> bool:
        # Only comparable with other FormatControl instances.
        if not isinstance(other, self.__class__):
            return NotImplemented

        if self.__slots__ != other.__slots__:
            return False

        return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})"

    @staticmethod
    def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
        """Parse the comma-separated ``value`` into ``target``, removing the
        named packages from ``other`` so the two sets stay mutually
        exclusive per package name."""
        if value.startswith("-"):
            # A leading "-" means the option consumed the next flag as its
            # value, i.e. the real argument is missing.
            raise CommandError(
                "--no-binary / --only-binary option requires 1 argument."
            )
        new = value.split(",")
        while ":all:" in new:
            # ":all:" wipes both sets and claims everything for ``target``;
            # entries before it are superseded and dropped.
            other.clear()
            target.clear()
            target.add(":all:")
            del new[: new.index(":all:") + 1]
            # Without a none, we want to discard everything as :all: covers it
            if ":none:" not in new:
                return
        for name in new:
            if name == ":none:":
                # ":none:" resets ``target``, cancelling earlier entries.
                target.clear()
                continue
            name = canonicalize_name(name)
            other.discard(name)
            target.add(name)

    def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
        """Return the subset of {"binary", "source"} allowed for a package.

        Per-package entries take precedence over ":all:" entries.
        """
        result = {"binary", "source"}
        if canonical_name in self.only_binary:
            result.discard("source")
        elif canonical_name in self.no_binary:
            result.discard("binary")
        elif ":all:" in self.only_binary:
            result.discard("source")
        elif ":all:" in self.no_binary:
            result.discard("binary")
        return frozenset(result)

    def disallow_binaries(self) -> None:
        """Disallow binaries for every package (":all:" into no_binary)."""
        self.handle_mutual_excludes(
            ":all:",
            self.no_binary,
            self.only_binary,
        )
.venv/lib/python3.11/site-packages/pip/_internal/models/index.py ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import urllib.parse
2
+
3
+
4
class PackageIndex:
    """Represents a Package Index and provides easier access to endpoints"""

    __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]

    def __init__(self, url: str, file_storage_domain: str) -> None:
        super().__init__()
        self.url = url
        self.netloc = urllib.parse.urlsplit(url).netloc
        # Pre-compute the common endpoint URLs once.
        self.simple_url = self._url_for_path("simple")
        self.pypi_url = self._url_for_path("pypi")

        # This is part of a temporary hack used to block installs of PyPI
        # packages which depend on external urls only necessary until PyPI can
        # block such packages themselves
        self.file_storage_domain = file_storage_domain

    def _url_for_path(self, path: str) -> str:
        """Resolve ``path`` against this index's base URL."""
        return urllib.parse.urljoin(self.url, path)


PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
TestPyPI = PackageIndex(
    "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
)
.venv/lib/python3.11/site-packages/pip/_internal/models/installation_report.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Any, Dict, Sequence
2
+
3
+ from pip._vendor.packaging.markers import default_environment
4
+
5
+ from pip import __version__
6
+ from pip._internal.req.req_install import InstallRequirement
7
+
8
+
9
class InstallationReport:
    """Aggregates a sequence of InstallRequirements into a
    JSON-serializable report dict."""

    def __init__(self, install_requirements: Sequence[InstallRequirement]):
        self._install_requirements = install_requirements

    @classmethod
    def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]:
        """Serialize a single install requirement into one report entry."""
        assert ireq.download_info, f"No download_info for {ireq}"
        res = {
            # PEP 610 json for the download URL. download_info.archive_info.hashes may
            # be absent when the requirement was installed from the wheel cache
            # and the cache entry was populated by an older pip version that did not
            # record origin.json.
            "download_info": ireq.download_info.to_dict(),
            # is_direct is true if the requirement was a direct URL reference (which
            # includes editable requirements), and false if the requirement was
            # downloaded from a PEP 503 index or --find-links.
            "is_direct": ireq.is_direct,
            # is_yanked is true if the requirement was yanked from the index, but
            # was still selected by pip to conform to PEP 592.
            "is_yanked": ireq.link.is_yanked if ireq.link else False,
            # requested is true if the requirement was specified by the user (aka
            # top level requirement), and false if it was installed as a dependency of a
            # requirement. https://peps.python.org/pep-0376/#requested
            "requested": ireq.user_supplied,
            # PEP 566 json encoding for metadata
            # https://www.python.org/dev/peps/pep-0566/#json-compatible-metadata
            "metadata": ireq.get_dist().metadata_dict,
        }
        if ireq.user_supplied and ireq.extras:
            # For top level requirements, the list of requested extras, if any.
            res["requested_extras"] = sorted(ireq.extras)
        return res

    def to_dict(self) -> Dict[str, Any]:
        """Return the full report as a JSON-compatible dict."""
        return {
            "version": "1",
            "pip_version": __version__,
            "install": [
                self._install_req_to_dict(ireq) for ireq in self._install_requirements
            ],
            # https://peps.python.org/pep-0508/#environment-markers
            # TODO: currently, the resolver uses the default environment to evaluate
            # environment markers, so that is what we report here. In the future, it
            # should also take into account options such as --python-version or
            # --platform, perhaps under the form of an environment_override field?
            # https://github.com/pypa/pip/issues/11198
            "environment": default_environment(),
        }
.venv/lib/python3.11/site-packages/pip/_internal/models/link.py ADDED
@@ -0,0 +1,579 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import itertools
3
+ import logging
4
+ import os
5
+ import posixpath
6
+ import re
7
+ import urllib.parse
8
+ from dataclasses import dataclass
9
+ from typing import (
10
+ TYPE_CHECKING,
11
+ Any,
12
+ Dict,
13
+ List,
14
+ Mapping,
15
+ NamedTuple,
16
+ Optional,
17
+ Tuple,
18
+ Union,
19
+ )
20
+
21
+ from pip._internal.utils.deprecation import deprecated
22
+ from pip._internal.utils.filetypes import WHEEL_EXTENSION
23
+ from pip._internal.utils.hashes import Hashes
24
+ from pip._internal.utils.misc import (
25
+ pairwise,
26
+ redact_auth_from_url,
27
+ split_auth_from_netloc,
28
+ splitext,
29
+ )
30
+ from pip._internal.utils.models import KeyBasedCompareMixin
31
+ from pip._internal.utils.urls import path_to_url, url_to_path
32
+
33
+ if TYPE_CHECKING:
34
+ from pip._internal.index.collector import IndexContent
35
+
36
+ logger = logging.getLogger(__name__)
37
+
38
+
39
+ # Order matters, earlier hashes have a precedence over later hashes for what
40
+ # we will pick to use.
41
+ _SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
42
+
43
+
44
+ @dataclass(frozen=True)
45
+ class LinkHash:
46
+ """Links to content may have embedded hash values. This class parses those.
47
+
48
+ `name` must be any member of `_SUPPORTED_HASHES`.
49
+
50
+ This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to
51
+ be JSON-serializable to conform to PEP 610, this class contains the logic for
52
+ parsing a hash name and value for correctness, and then checking whether that hash
53
+ conforms to a schema with `.is_hash_allowed()`."""
54
+
55
+ name: str
56
+ value: str
57
+
58
+ _hash_url_fragment_re = re.compile(
59
+ # NB: we do not validate that the second group (.*) is a valid hex
60
+ # digest. Instead, we simply keep that string in this class, and then check it
61
+ # against Hashes when hash-checking is needed. This is easier to debug than
62
+ # proactively discarding an invalid hex digest, as we handle incorrect hashes
63
+ # and malformed hashes in the same place.
64
+ r"[#&]({choices})=([^&]*)".format(
65
+ choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
66
+ ),
67
+ )
68
+
69
+ def __post_init__(self) -> None:
70
+ assert self.name in _SUPPORTED_HASHES
71
+
72
+ @classmethod
73
+ @functools.lru_cache(maxsize=None)
74
+ def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
75
+ """Search a string for a checksum algorithm name and encoded output value."""
76
+ match = cls._hash_url_fragment_re.search(url)
77
+ if match is None:
78
+ return None
79
+ name, value = match.groups()
80
+ return cls(name=name, value=value)
81
+
82
+ def as_dict(self) -> Dict[str, str]:
83
+ return {self.name: self.value}
84
+
85
+ def as_hashes(self) -> Hashes:
86
+ """Return a Hashes instance which checks only for the current hash."""
87
+ return Hashes({self.name: [self.value]})
88
+
89
+ def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
90
+ """
91
+ Return True if the current hash is allowed by `hashes`.
92
+ """
93
+ if hashes is None:
94
+ return False
95
+ return hashes.is_hash_allowed(self.name, hex_digest=self.value)
96
+
97
+
98
+ @dataclass(frozen=True)
99
+ class MetadataFile:
100
+ """Information about a core metadata file associated with a distribution."""
101
+
102
+ hashes: Optional[Dict[str, str]]
103
+
104
+ def __post_init__(self) -> None:
105
+ if self.hashes is not None:
106
+ assert all(name in _SUPPORTED_HASHES for name in self.hashes)
107
+
108
+
109
+ def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
110
+ # Remove any unsupported hash types from the mapping. If this leaves no
111
+ # supported hashes, return None
112
+ if hashes is None:
113
+ return None
114
+ hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
115
+ if not hashes:
116
+ return None
117
+ return hashes
118
+
119
+
120
+ def _clean_url_path_part(part: str) -> str:
121
+ """
122
+ Clean a "part" of a URL path (i.e. after splitting on "@" characters).
123
+ """
124
+ # We unquote prior to quoting to make sure nothing is double quoted.
125
+ return urllib.parse.quote(urllib.parse.unquote(part))
126
+
127
+
128
+ def _clean_file_url_path(part: str) -> str:
129
+ """
130
+ Clean the first part of a URL path that corresponds to a local
131
+ filesystem path (i.e. the first part after splitting on "@" characters).
132
+ """
133
+ # We unquote prior to quoting to make sure nothing is double quoted.
134
+ # Also, on Windows the path part might contain a drive letter which
135
+ # should not be quoted. On Linux where drive letters do not
136
+ # exist, the colon should be quoted. We rely on urllib.request
137
+ # to do the right thing here.
138
+ return urllib.request.pathname2url(urllib.request.url2pathname(part))
139
+
140
+
141
+ # percent-encoded: /
142
+ _reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
143
+
144
+
145
+ def _clean_url_path(path: str, is_local_path: bool) -> str:
146
+ """
147
+ Clean the path portion of a URL.
148
+ """
149
+ if is_local_path:
150
+ clean_func = _clean_file_url_path
151
+ else:
152
+ clean_func = _clean_url_path_part
153
+
154
+ # Split on the reserved characters prior to cleaning so that
155
+ # revision strings in VCS URLs are properly preserved.
156
+ parts = _reserved_chars_re.split(path)
157
+
158
+ cleaned_parts = []
159
+ for to_clean, reserved in pairwise(itertools.chain(parts, [""])):
160
+ cleaned_parts.append(clean_func(to_clean))
161
+ # Normalize %xx escapes (e.g. %2f -> %2F)
162
+ cleaned_parts.append(reserved.upper())
163
+
164
+ return "".join(cleaned_parts)
165
+
166
+
167
+ def _ensure_quoted_url(url: str) -> str:
168
+ """
169
+ Make sure a link is fully quoted.
170
+ For example, if ' ' occurs in the URL, it will be replaced with "%20",
171
+ and without double-quoting other characters.
172
+ """
173
+ # Split the URL into parts according to the general structure
174
+ # `scheme://netloc/path;parameters?query#fragment`.
175
+ result = urllib.parse.urlparse(url)
176
+ # If the netloc is empty, then the URL refers to a local filesystem path.
177
+ is_local_path = not result.netloc
178
+ path = _clean_url_path(result.path, is_local_path=is_local_path)
179
+ return urllib.parse.urlunparse(result._replace(path=path))
180
+
181
+
182
+ class Link(KeyBasedCompareMixin):
183
+ """Represents a parsed link from a Package Index's simple URL"""
184
+
185
+ __slots__ = [
186
+ "_parsed_url",
187
+ "_url",
188
+ "_hashes",
189
+ "comes_from",
190
+ "requires_python",
191
+ "yanked_reason",
192
+ "metadata_file_data",
193
+ "cache_link_parsing",
194
+ "egg_fragment",
195
+ ]
196
+
197
+ def __init__(
198
+ self,
199
+ url: str,
200
+ comes_from: Optional[Union[str, "IndexContent"]] = None,
201
+ requires_python: Optional[str] = None,
202
+ yanked_reason: Optional[str] = None,
203
+ metadata_file_data: Optional[MetadataFile] = None,
204
+ cache_link_parsing: bool = True,
205
+ hashes: Optional[Mapping[str, str]] = None,
206
+ ) -> None:
207
+ """
208
+ :param url: url of the resource pointed to (href of the link)
209
+ :param comes_from: instance of IndexContent where the link was found,
210
+ or string.
211
+ :param requires_python: String containing the `Requires-Python`
212
+ metadata field, specified in PEP 345. This may be specified by
213
+ a data-requires-python attribute in the HTML link tag, as
214
+ described in PEP 503.
215
+ :param yanked_reason: the reason the file has been yanked, if the
216
+ file has been yanked, or None if the file hasn't been yanked.
217
+ This is the value of the "data-yanked" attribute, if present, in
218
+ a simple repository HTML link. If the file has been yanked but
219
+ no reason was provided, this should be the empty string. See
220
+ PEP 592 for more information and the specification.
221
+ :param metadata_file_data: the metadata attached to the file, or None if
222
+ no such metadata is provided. This argument, if not None, indicates
223
+ that a separate metadata file exists, and also optionally supplies
224
+ hashes for that file.
225
+ :param cache_link_parsing: A flag that is used elsewhere to determine
226
+ whether resources retrieved from this link should be cached. PyPI
227
+ URLs should generally have this set to False, for example.
228
+ :param hashes: A mapping of hash names to digests to allow us to
229
+ determine the validity of a download.
230
+ """
231
+
232
+ # The comes_from, requires_python, and metadata_file_data arguments are
233
+ # only used by classmethods of this class, and are not used in client
234
+ # code directly.
235
+
236
+ # url can be a UNC windows share
237
+ if url.startswith("\\\\"):
238
+ url = path_to_url(url)
239
+
240
+ self._parsed_url = urllib.parse.urlsplit(url)
241
+ # Store the url as a private attribute to prevent accidentally
242
+ # trying to set a new value.
243
+ self._url = url
244
+
245
+ link_hash = LinkHash.find_hash_url_fragment(url)
246
+ hashes_from_link = {} if link_hash is None else link_hash.as_dict()
247
+ if hashes is None:
248
+ self._hashes = hashes_from_link
249
+ else:
250
+ self._hashes = {**hashes, **hashes_from_link}
251
+
252
+ self.comes_from = comes_from
253
+ self.requires_python = requires_python if requires_python else None
254
+ self.yanked_reason = yanked_reason
255
+ self.metadata_file_data = metadata_file_data
256
+
257
+ super().__init__(key=url, defining_class=Link)
258
+
259
+ self.cache_link_parsing = cache_link_parsing
260
+ self.egg_fragment = self._egg_fragment()
261
+
262
+ @classmethod
263
+ def from_json(
264
+ cls,
265
+ file_data: Dict[str, Any],
266
+ page_url: str,
267
+ ) -> Optional["Link"]:
268
+ """
269
+ Convert an pypi json document from a simple repository page into a Link.
270
+ """
271
+ file_url = file_data.get("url")
272
+ if file_url is None:
273
+ return None
274
+
275
+ url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
276
+ pyrequire = file_data.get("requires-python")
277
+ yanked_reason = file_data.get("yanked")
278
+ hashes = file_data.get("hashes", {})
279
+
280
+ # PEP 714: Indexes must use the name core-metadata, but
281
+ # clients should support the old name as a fallback for compatibility.
282
+ metadata_info = file_data.get("core-metadata")
283
+ if metadata_info is None:
284
+ metadata_info = file_data.get("dist-info-metadata")
285
+
286
+ # The metadata info value may be a boolean, or a dict of hashes.
287
+ if isinstance(metadata_info, dict):
288
+ # The file exists, and hashes have been supplied
289
+ metadata_file_data = MetadataFile(supported_hashes(metadata_info))
290
+ elif metadata_info:
291
+ # The file exists, but there are no hashes
292
+ metadata_file_data = MetadataFile(None)
293
+ else:
294
+ # False or not present: the file does not exist
295
+ metadata_file_data = None
296
+
297
+ # The Link.yanked_reason expects an empty string instead of a boolean.
298
+ if yanked_reason and not isinstance(yanked_reason, str):
299
+ yanked_reason = ""
300
+ # The Link.yanked_reason expects None instead of False.
301
+ elif not yanked_reason:
302
+ yanked_reason = None
303
+
304
+ return cls(
305
+ url,
306
+ comes_from=page_url,
307
+ requires_python=pyrequire,
308
+ yanked_reason=yanked_reason,
309
+ hashes=hashes,
310
+ metadata_file_data=metadata_file_data,
311
+ )
312
+
313
+ @classmethod
314
+ def from_element(
315
+ cls,
316
+ anchor_attribs: Dict[str, Optional[str]],
317
+ page_url: str,
318
+ base_url: str,
319
+ ) -> Optional["Link"]:
320
+ """
321
+ Convert an anchor element's attributes in a simple repository page to a Link.
322
+ """
323
+ href = anchor_attribs.get("href")
324
+ if not href:
325
+ return None
326
+
327
+ url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
328
+ pyrequire = anchor_attribs.get("data-requires-python")
329
+ yanked_reason = anchor_attribs.get("data-yanked")
330
+
331
+ # PEP 714: Indexes must use the name data-core-metadata, but
332
+ # clients should support the old name as a fallback for compatibility.
333
+ metadata_info = anchor_attribs.get("data-core-metadata")
334
+ if metadata_info is None:
335
+ metadata_info = anchor_attribs.get("data-dist-info-metadata")
336
+ # The metadata info value may be the string "true", or a string of
337
+ # the form "hashname=hashval"
338
+ if metadata_info == "true":
339
+ # The file exists, but there are no hashes
340
+ metadata_file_data = MetadataFile(None)
341
+ elif metadata_info is None:
342
+ # The file does not exist
343
+ metadata_file_data = None
344
+ else:
345
+ # The file exists, and hashes have been supplied
346
+ hashname, sep, hashval = metadata_info.partition("=")
347
+ if sep == "=":
348
+ metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
349
+ else:
350
+ # Error - data is wrong. Treat as no hashes supplied.
351
+ logger.debug(
352
+ "Index returned invalid data-dist-info-metadata value: %s",
353
+ metadata_info,
354
+ )
355
+ metadata_file_data = MetadataFile(None)
356
+
357
+ return cls(
358
+ url,
359
+ comes_from=page_url,
360
+ requires_python=pyrequire,
361
+ yanked_reason=yanked_reason,
362
+ metadata_file_data=metadata_file_data,
363
+ )
364
+
365
+ def __str__(self) -> str:
366
+ if self.requires_python:
367
+ rp = f" (requires-python:{self.requires_python})"
368
+ else:
369
+ rp = ""
370
+ if self.comes_from:
371
+ return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
372
+ else:
373
+ return redact_auth_from_url(str(self._url))
374
+
375
+ def __repr__(self) -> str:
376
+ return f"<Link {self}>"
377
+
378
+ @property
379
+ def url(self) -> str:
380
+ return self._url
381
+
382
+ @property
383
+ def filename(self) -> str:
384
+ path = self.path.rstrip("/")
385
+ name = posixpath.basename(path)
386
+ if not name:
387
+ # Make sure we don't leak auth information if the netloc
388
+ # includes a username and password.
389
+ netloc, user_pass = split_auth_from_netloc(self.netloc)
390
+ return netloc
391
+
392
+ name = urllib.parse.unquote(name)
393
+ assert name, f"URL {self._url!r} produced no filename"
394
+ return name
395
+
396
+ @property
397
+ def file_path(self) -> str:
398
+ return url_to_path(self.url)
399
+
400
+ @property
401
+ def scheme(self) -> str:
402
+ return self._parsed_url.scheme
403
+
404
+ @property
405
+ def netloc(self) -> str:
406
+ """
407
+ This can contain auth information.
408
+ """
409
+ return self._parsed_url.netloc
410
+
411
+ @property
412
+ def path(self) -> str:
413
+ return urllib.parse.unquote(self._parsed_url.path)
414
+
415
+ def splitext(self) -> Tuple[str, str]:
416
+ return splitext(posixpath.basename(self.path.rstrip("/")))
417
+
418
+ @property
419
+ def ext(self) -> str:
420
+ return self.splitext()[1]
421
+
422
+ @property
423
+ def url_without_fragment(self) -> str:
424
+ scheme, netloc, path, query, fragment = self._parsed_url
425
+ return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
426
+
427
+ _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
428
+
429
+ # Per PEP 508.
430
+ _project_name_re = re.compile(
431
+ r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
432
+ )
433
+
434
+ def _egg_fragment(self) -> Optional[str]:
435
+ match = self._egg_fragment_re.search(self._url)
436
+ if not match:
437
+ return None
438
+
439
+ # An egg fragment looks like a PEP 508 project name, along with
440
+ # an optional extras specifier. Anything else is invalid.
441
+ project_name = match.group(1)
442
+ if not self._project_name_re.match(project_name):
443
+ deprecated(
444
+ reason=f"{self} contains an egg fragment with a non-PEP 508 name",
445
+ replacement="to use the req @ url syntax, and remove the egg fragment",
446
+ gone_in="25.0",
447
+ issue=11617,
448
+ )
449
+
450
+ return project_name
451
+
452
+ _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
453
+
454
+ @property
455
+ def subdirectory_fragment(self) -> Optional[str]:
456
+ match = self._subdirectory_fragment_re.search(self._url)
457
+ if not match:
458
+ return None
459
+ return match.group(1)
460
+
461
+ def metadata_link(self) -> Optional["Link"]:
462
+ """Return a link to the associated core metadata file (if any)."""
463
+ if self.metadata_file_data is None:
464
+ return None
465
+ metadata_url = f"{self.url_without_fragment}.metadata"
466
+ if self.metadata_file_data.hashes is None:
467
+ return Link(metadata_url)
468
+ return Link(metadata_url, hashes=self.metadata_file_data.hashes)
469
+
470
+ def as_hashes(self) -> Hashes:
471
+ return Hashes({k: [v] for k, v in self._hashes.items()})
472
+
473
+ @property
474
+ def hash(self) -> Optional[str]:
475
+ return next(iter(self._hashes.values()), None)
476
+
477
+ @property
478
+ def hash_name(self) -> Optional[str]:
479
+ return next(iter(self._hashes), None)
480
+
481
+ @property
482
+ def show_url(self) -> str:
483
+ return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])
484
+
485
+ @property
486
+ def is_file(self) -> bool:
487
+ return self.scheme == "file"
488
+
489
+ def is_existing_dir(self) -> bool:
490
+ return self.is_file and os.path.isdir(self.file_path)
491
+
492
+ @property
493
+ def is_wheel(self) -> bool:
494
+ return self.ext == WHEEL_EXTENSION
495
+
496
+ @property
497
+ def is_vcs(self) -> bool:
498
+ from pip._internal.vcs import vcs
499
+
500
+ return self.scheme in vcs.all_schemes
501
+
502
+ @property
503
+ def is_yanked(self) -> bool:
504
+ return self.yanked_reason is not None
505
+
506
+ @property
507
+ def has_hash(self) -> bool:
508
+ return bool(self._hashes)
509
+
510
+ def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
511
+ """
512
+ Return True if the link has a hash and it is allowed by `hashes`.
513
+ """
514
+ if hashes is None:
515
+ return False
516
+ return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())
517
+
518
+
519
+ class _CleanResult(NamedTuple):
520
+ """Convert link for equivalency check.
521
+
522
+ This is used in the resolver to check whether two URL-specified requirements
523
+ likely point to the same distribution and can be considered equivalent. This
524
+ equivalency logic avoids comparing URLs literally, which can be too strict
525
+ (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users.
526
+
527
+ Currently this does three things:
528
+
529
+ 1. Drop the basic auth part. This is technically wrong since a server can
530
+ serve different content based on auth, but if it does that, it is even
531
+ impossible to guarantee two URLs without auth are equivalent, since
532
+ the user can input different auth information when prompted. So the
533
+ practical solution is to assume the auth doesn't affect the response.
534
+ 2. Parse the query to avoid the ordering issue. Note that ordering under the
535
+ same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are
536
+ still considered different.
537
+ 3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
538
+ hash values, since it should have no impact the downloaded content. Note
539
+ that this drops the "egg=" part historically used to denote the requested
540
+ project (and extras), which is wrong in the strictest sense, but too many
541
+ people are supplying it inconsistently to cause superfluous resolution
542
+ conflicts, so we choose to also ignore them.
543
+ """
544
+
545
+ parsed: urllib.parse.SplitResult
546
+ query: Dict[str, List[str]]
547
+ subdirectory: str
548
+ hashes: Dict[str, str]
549
+
550
+
551
+ def _clean_link(link: Link) -> _CleanResult:
552
+ parsed = link._parsed_url
553
+ netloc = parsed.netloc.rsplit("@", 1)[-1]
554
+ # According to RFC 8089, an empty host in file: means localhost.
555
+ if parsed.scheme == "file" and not netloc:
556
+ netloc = "localhost"
557
+ fragment = urllib.parse.parse_qs(parsed.fragment)
558
+ if "egg" in fragment:
559
+ logger.debug("Ignoring egg= fragment in %s", link)
560
+ try:
561
+ # If there are multiple subdirectory values, use the first one.
562
+ # This matches the behavior of Link.subdirectory_fragment.
563
+ subdirectory = fragment["subdirectory"][0]
564
+ except (IndexError, KeyError):
565
+ subdirectory = ""
566
+ # If there are multiple hash values under the same algorithm, use the
567
+ # first one. This matches the behavior of Link.hash_value.
568
+ hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment}
569
+ return _CleanResult(
570
+ parsed=parsed._replace(netloc=netloc, query="", fragment=""),
571
+ query=urllib.parse.parse_qs(parsed.query),
572
+ subdirectory=subdirectory,
573
+ hashes=hashes,
574
+ )
575
+
576
+
577
+ @functools.lru_cache(maxsize=None)
578
+ def links_equivalent(link1: Link, link2: Link) -> bool:
579
+ return _clean_link(link1) == _clean_link(link2)