prasb commited on
Commit
dae6971
·
verified ·
1 Parent(s): 0337776

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +1 -0
  2. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/cli/autocompletion.py +172 -0
  3. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/cli/req_command.py +505 -0
  4. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__init__.py +132 -0
  5. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/debug.py +201 -0
  6. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/freeze.py +108 -0
  7. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/hash.py +59 -0
  8. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/index.py +139 -0
  9. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/inspect.py +92 -0
  10. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/locations/__init__.py +467 -0
  11. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/locations/_sysconfig.py +213 -0
  12. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/__init__.py +128 -0
  13. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-38.pyc +0 -0
  14. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/__pycache__/base.cpython-38.pyc +0 -0
  15. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/_json.py +84 -0
  16. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/base.py +702 -0
  17. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/__init__.py +6 -0
  18. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-38.pyc +0 -0
  19. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-38.pyc +0 -0
  20. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-38.pyc +0 -0
  21. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-38.pyc +0 -0
  22. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/_compat.py +55 -0
  23. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/_dists.py +227 -0
  24. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/_envs.py +189 -0
  25. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/pkg_resources.py +278 -0
  26. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-38.pyc +0 -0
  27. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-38.pyc +0 -0
  28. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-38.pyc +0 -0
  29. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/target_python.cpython-38.pyc +0 -0
  30. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-38.pyc +0 -0
  31. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/check.cpython-38.pyc +0 -0
  32. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-38.pyc +0 -0
  33. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-38.pyc +0 -0
  34. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/__init__.py +0 -0
  35. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-38.pyc +0 -0
  36. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/build_tracker.py +139 -0
  37. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/metadata.py +39 -0
  38. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/metadata_editable.py +41 -0
  39. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/metadata_legacy.py +74 -0
  40. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/wheel.py +37 -0
  41. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/wheel_editable.py +46 -0
  42. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/wheel_legacy.py +102 -0
  43. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/install/__init__.py +2 -0
  44. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-38.pyc +0 -0
  45. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-38.pyc +0 -0
  46. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-38.pyc +0 -0
  47. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/install/editable_legacy.py +46 -0
  48. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/install/wheel.py +734 -0
  49. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/__init__.py +0 -0
  50. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/base.py +20 -0
.gitattributes CHANGED
@@ -359,3 +359,4 @@ my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/opencv_pyth
359
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_vendor/distlib/w64.exe filter=lfs diff=lfs merge=lfs -text
360
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/opencv_python.libs/libQt5XcbQpa-5b2d853e.so.5.15.0 filter=lfs diff=lfs merge=lfs -text
361
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_vendor/distlib/t64-arm.exe filter=lfs diff=lfs merge=lfs -text
 
 
359
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_vendor/distlib/w64.exe filter=lfs diff=lfs merge=lfs -text
360
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/opencv_python.libs/libQt5XcbQpa-5b2d853e.so.5.15.0 filter=lfs diff=lfs merge=lfs -text
361
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_vendor/distlib/t64-arm.exe filter=lfs diff=lfs merge=lfs -text
362
+ my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_vendor/distlib/t64.exe filter=lfs diff=lfs merge=lfs -text
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/cli/autocompletion.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Logic that powers autocompletion installed by ``pip completion``.
2
+ """
3
+
4
+ import optparse
5
+ import os
6
+ import sys
7
+ from itertools import chain
8
+ from typing import Any, Iterable, List, Optional
9
+
10
+ from pip._internal.cli.main_parser import create_main_parser
11
+ from pip._internal.commands import commands_dict, create_command
12
+ from pip._internal.metadata import get_default_environment
13
+
14
+
15
+ def autocomplete() -> None:
16
+ """Entry Point for completion of main and subcommand options."""
17
+ # Don't complete if user hasn't sourced bash_completion file.
18
+ if "PIP_AUTO_COMPLETE" not in os.environ:
19
+ return
20
+ cwords = os.environ["COMP_WORDS"].split()[1:]
21
+ cword = int(os.environ["COMP_CWORD"])
22
+ try:
23
+ current = cwords[cword - 1]
24
+ except IndexError:
25
+ current = ""
26
+
27
+ parser = create_main_parser()
28
+ subcommands = list(commands_dict)
29
+ options = []
30
+
31
+ # subcommand
32
+ subcommand_name: Optional[str] = None
33
+ for word in cwords:
34
+ if word in subcommands:
35
+ subcommand_name = word
36
+ break
37
+ # subcommand options
38
+ if subcommand_name is not None:
39
+ # special case: 'help' subcommand has no options
40
+ if subcommand_name == "help":
41
+ sys.exit(1)
42
+ # special case: list locally installed dists for show and uninstall
43
+ should_list_installed = not current.startswith("-") and subcommand_name in [
44
+ "show",
45
+ "uninstall",
46
+ ]
47
+ if should_list_installed:
48
+ env = get_default_environment()
49
+ lc = current.lower()
50
+ installed = [
51
+ dist.canonical_name
52
+ for dist in env.iter_installed_distributions(local_only=True)
53
+ if dist.canonical_name.startswith(lc)
54
+ and dist.canonical_name not in cwords[1:]
55
+ ]
56
+ # if there are no dists installed, fall back to option completion
57
+ if installed:
58
+ for dist in installed:
59
+ print(dist)
60
+ sys.exit(1)
61
+
62
+ should_list_installables = (
63
+ not current.startswith("-") and subcommand_name == "install"
64
+ )
65
+ if should_list_installables:
66
+ for path in auto_complete_paths(current, "path"):
67
+ print(path)
68
+ sys.exit(1)
69
+
70
+ subcommand = create_command(subcommand_name)
71
+
72
+ for opt in subcommand.parser.option_list_all:
73
+ if opt.help != optparse.SUPPRESS_HELP:
74
+ options += [
75
+ (opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts
76
+ ]
77
+
78
+ # filter out previously specified options from available options
79
+ prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
80
+ options = [(x, v) for (x, v) in options if x not in prev_opts]
81
+ # filter options by current input
82
+ options = [(k, v) for k, v in options if k.startswith(current)]
83
+ # get completion type given cwords and available subcommand options
84
+ completion_type = get_path_completion_type(
85
+ cwords,
86
+ cword,
87
+ subcommand.parser.option_list_all,
88
+ )
89
+ # get completion files and directories if ``completion_type`` is
90
+ # ``<file>``, ``<dir>`` or ``<path>``
91
+ if completion_type:
92
+ paths = auto_complete_paths(current, completion_type)
93
+ options = [(path, 0) for path in paths]
94
+ for option in options:
95
+ opt_label = option[0]
96
+ # append '=' to options which require args
97
+ if option[1] and option[0][:2] == "--":
98
+ opt_label += "="
99
+ print(opt_label)
100
+ else:
101
+ # show main parser options only when necessary
102
+
103
+ opts = [i.option_list for i in parser.option_groups]
104
+ opts.append(parser.option_list)
105
+ flattened_opts = chain.from_iterable(opts)
106
+ if current.startswith("-"):
107
+ for opt in flattened_opts:
108
+ if opt.help != optparse.SUPPRESS_HELP:
109
+ subcommands += opt._long_opts + opt._short_opts
110
+ else:
111
+ # get completion type given cwords and all available options
112
+ completion_type = get_path_completion_type(cwords, cword, flattened_opts)
113
+ if completion_type:
114
+ subcommands = list(auto_complete_paths(current, completion_type))
115
+
116
+ print(" ".join([x for x in subcommands if x.startswith(current)]))
117
+ sys.exit(1)
118
+
119
+
120
+ def get_path_completion_type(
121
+ cwords: List[str], cword: int, opts: Iterable[Any]
122
+ ) -> Optional[str]:
123
+ """Get the type of path completion (``file``, ``dir``, ``path`` or None)
124
+
125
+ :param cwords: same as the environmental variable ``COMP_WORDS``
126
+ :param cword: same as the environmental variable ``COMP_CWORD``
127
+ :param opts: The available options to check
128
+ :return: path completion type (``file``, ``dir``, ``path`` or None)
129
+ """
130
+ if cword < 2 or not cwords[cword - 2].startswith("-"):
131
+ return None
132
+ for opt in opts:
133
+ if opt.help == optparse.SUPPRESS_HELP:
134
+ continue
135
+ for o in str(opt).split("/"):
136
+ if cwords[cword - 2].split("=")[0] == o:
137
+ if not opt.metavar or any(
138
+ x in ("path", "file", "dir") for x in opt.metavar.split("/")
139
+ ):
140
+ return opt.metavar
141
+ return None
142
+
143
+
144
+ def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
145
+ """If ``completion_type`` is ``file`` or ``path``, list all regular files
146
+ and directories starting with ``current``; otherwise only list directories
147
+ starting with ``current``.
148
+
149
+ :param current: The word to be completed
150
+ :param completion_type: path completion type(``file``, ``path`` or ``dir``)
151
+ :return: A generator of regular files and/or directories
152
+ """
153
+ directory, filename = os.path.split(current)
154
+ current_path = os.path.abspath(directory)
155
+ # Don't complete paths if they can't be accessed
156
+ if not os.access(current_path, os.R_OK):
157
+ return
158
+ filename = os.path.normcase(filename)
159
+ # list all files that start with ``filename``
160
+ file_list = (
161
+ x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
162
+ )
163
+ for f in file_list:
164
+ opt = os.path.join(current_path, f)
165
+ comp_file = os.path.normcase(os.path.join(directory, f))
166
+ # complete regular files when there is not ``<dir>`` after option
167
+ # complete directories when there is ``<file>``, ``<path>`` or
168
+ # ``<dir>``after option
169
+ if completion_type != "dir" and os.path.isfile(opt):
170
+ yield comp_file
171
+ elif os.path.isdir(opt):
172
+ yield os.path.join(comp_file, "")
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/cli/req_command.py ADDED
@@ -0,0 +1,505 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Contains the Command base classes that depend on PipSession.
2
+
3
+ The classes in this module are in a separate module so the commands not
4
+ needing download / PackageFinder capability don't unnecessarily import the
5
+ PackageFinder machinery and all its vendored dependencies, etc.
6
+ """
7
+
8
+ import logging
9
+ import os
10
+ import sys
11
+ from functools import partial
12
+ from optparse import Values
13
+ from typing import TYPE_CHECKING, Any, List, Optional, Tuple
14
+
15
+ from pip._internal.cache import WheelCache
16
+ from pip._internal.cli import cmdoptions
17
+ from pip._internal.cli.base_command import Command
18
+ from pip._internal.cli.command_context import CommandContextMixIn
19
+ from pip._internal.exceptions import CommandError, PreviousBuildDirError
20
+ from pip._internal.index.collector import LinkCollector
21
+ from pip._internal.index.package_finder import PackageFinder
22
+ from pip._internal.models.selection_prefs import SelectionPreferences
23
+ from pip._internal.models.target_python import TargetPython
24
+ from pip._internal.network.session import PipSession
25
+ from pip._internal.operations.build.build_tracker import BuildTracker
26
+ from pip._internal.operations.prepare import RequirementPreparer
27
+ from pip._internal.req.constructors import (
28
+ install_req_from_editable,
29
+ install_req_from_line,
30
+ install_req_from_parsed_requirement,
31
+ install_req_from_req_string,
32
+ )
33
+ from pip._internal.req.req_file import parse_requirements
34
+ from pip._internal.req.req_install import InstallRequirement
35
+ from pip._internal.resolution.base import BaseResolver
36
+ from pip._internal.self_outdated_check import pip_self_version_check
37
+ from pip._internal.utils.temp_dir import (
38
+ TempDirectory,
39
+ TempDirectoryTypeRegistry,
40
+ tempdir_kinds,
41
+ )
42
+ from pip._internal.utils.virtualenv import running_under_virtualenv
43
+
44
+ if TYPE_CHECKING:
45
+ from ssl import SSLContext
46
+
47
+ logger = logging.getLogger(__name__)
48
+
49
+
50
+ def _create_truststore_ssl_context() -> Optional["SSLContext"]:
51
+ if sys.version_info < (3, 10):
52
+ raise CommandError("The truststore feature is only available for Python 3.10+")
53
+
54
+ try:
55
+ import ssl
56
+ except ImportError:
57
+ logger.warning("Disabling truststore since ssl support is missing")
58
+ return None
59
+
60
+ try:
61
+ from pip._vendor import truststore
62
+ except ImportError as e:
63
+ raise CommandError(f"The truststore feature is unavailable: {e}")
64
+
65
+ return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
66
+
67
+
68
+ class SessionCommandMixin(CommandContextMixIn):
69
+
70
+ """
71
+ A class mixin for command classes needing _build_session().
72
+ """
73
+
74
+ def __init__(self) -> None:
75
+ super().__init__()
76
+ self._session: Optional[PipSession] = None
77
+
78
+ @classmethod
79
+ def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
80
+ """Return a list of index urls from user-provided options."""
81
+ index_urls = []
82
+ if not getattr(options, "no_index", False):
83
+ url = getattr(options, "index_url", None)
84
+ if url:
85
+ index_urls.append(url)
86
+ urls = getattr(options, "extra_index_urls", None)
87
+ if urls:
88
+ index_urls.extend(urls)
89
+ # Return None rather than an empty list
90
+ return index_urls or None
91
+
92
+ def get_default_session(self, options: Values) -> PipSession:
93
+ """Get a default-managed session."""
94
+ if self._session is None:
95
+ self._session = self.enter_context(self._build_session(options))
96
+ # there's no type annotation on requests.Session, so it's
97
+ # automatically ContextManager[Any] and self._session becomes Any,
98
+ # then https://github.com/python/mypy/issues/7696 kicks in
99
+ assert self._session is not None
100
+ return self._session
101
+
102
+ def _build_session(
103
+ self,
104
+ options: Values,
105
+ retries: Optional[int] = None,
106
+ timeout: Optional[int] = None,
107
+ fallback_to_certifi: bool = False,
108
+ ) -> PipSession:
109
+ cache_dir = options.cache_dir
110
+ assert not cache_dir or os.path.isabs(cache_dir)
111
+
112
+ if "truststore" in options.features_enabled:
113
+ try:
114
+ ssl_context = _create_truststore_ssl_context()
115
+ except Exception:
116
+ if not fallback_to_certifi:
117
+ raise
118
+ ssl_context = None
119
+ else:
120
+ ssl_context = None
121
+
122
+ session = PipSession(
123
+ cache=os.path.join(cache_dir, "http-v2") if cache_dir else None,
124
+ retries=retries if retries is not None else options.retries,
125
+ trusted_hosts=options.trusted_hosts,
126
+ index_urls=self._get_index_urls(options),
127
+ ssl_context=ssl_context,
128
+ )
129
+
130
+ # Handle custom ca-bundles from the user
131
+ if options.cert:
132
+ session.verify = options.cert
133
+
134
+ # Handle SSL client certificate
135
+ if options.client_cert:
136
+ session.cert = options.client_cert
137
+
138
+ # Handle timeouts
139
+ if options.timeout or timeout:
140
+ session.timeout = timeout if timeout is not None else options.timeout
141
+
142
+ # Handle configured proxies
143
+ if options.proxy:
144
+ session.proxies = {
145
+ "http": options.proxy,
146
+ "https": options.proxy,
147
+ }
148
+
149
+ # Determine if we can prompt the user for authentication or not
150
+ session.auth.prompting = not options.no_input
151
+ session.auth.keyring_provider = options.keyring_provider
152
+
153
+ return session
154
+
155
+
156
+ class IndexGroupCommand(Command, SessionCommandMixin):
157
+
158
+ """
159
+ Abstract base class for commands with the index_group options.
160
+
161
+ This also corresponds to the commands that permit the pip version check.
162
+ """
163
+
164
+ def handle_pip_version_check(self, options: Values) -> None:
165
+ """
166
+ Do the pip version check if not disabled.
167
+
168
+ This overrides the default behavior of not doing the check.
169
+ """
170
+ # Make sure the index_group options are present.
171
+ assert hasattr(options, "no_index")
172
+
173
+ if options.disable_pip_version_check or options.no_index:
174
+ return
175
+
176
+ # Otherwise, check if we're using the latest version of pip available.
177
+ session = self._build_session(
178
+ options,
179
+ retries=0,
180
+ timeout=min(5, options.timeout),
181
+ # This is set to ensure the function does not fail when truststore is
182
+ # specified in use-feature but cannot be loaded. This usually raises a
183
+ # CommandError and shows a nice user-facing error, but this function is not
184
+ # called in that try-except block.
185
+ fallback_to_certifi=True,
186
+ )
187
+ with session:
188
+ pip_self_version_check(session, options)
189
+
190
+
191
+ KEEPABLE_TEMPDIR_TYPES = [
192
+ tempdir_kinds.BUILD_ENV,
193
+ tempdir_kinds.EPHEM_WHEEL_CACHE,
194
+ tempdir_kinds.REQ_BUILD,
195
+ ]
196
+
197
+
198
+ def warn_if_run_as_root() -> None:
199
+ """Output a warning for sudo users on Unix.
200
+
201
+ In a virtual environment, sudo pip still writes to virtualenv.
202
+ On Windows, users may run pip as Administrator without issues.
203
+ This warning only applies to Unix root users outside of virtualenv.
204
+ """
205
+ if running_under_virtualenv():
206
+ return
207
+ if not hasattr(os, "getuid"):
208
+ return
209
+ # On Windows, there are no "system managed" Python packages. Installing as
210
+ # Administrator via pip is the correct way of updating system environments.
211
+ #
212
+ # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
213
+ # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
214
+ if sys.platform == "win32" or sys.platform == "cygwin":
215
+ return
216
+
217
+ if os.getuid() != 0:
218
+ return
219
+
220
+ logger.warning(
221
+ "Running pip as the 'root' user can result in broken permissions and "
222
+ "conflicting behaviour with the system package manager. "
223
+ "It is recommended to use a virtual environment instead: "
224
+ "https://pip.pypa.io/warnings/venv"
225
+ )
226
+
227
+
228
+ def with_cleanup(func: Any) -> Any:
229
+ """Decorator for common logic related to managing temporary
230
+ directories.
231
+ """
232
+
233
+ def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
234
+ for t in KEEPABLE_TEMPDIR_TYPES:
235
+ registry.set_delete(t, False)
236
+
237
+ def wrapper(
238
+ self: RequirementCommand, options: Values, args: List[Any]
239
+ ) -> Optional[int]:
240
+ assert self.tempdir_registry is not None
241
+ if options.no_clean:
242
+ configure_tempdir_registry(self.tempdir_registry)
243
+
244
+ try:
245
+ return func(self, options, args)
246
+ except PreviousBuildDirError:
247
+ # This kind of conflict can occur when the user passes an explicit
248
+ # build directory with a pre-existing folder. In that case we do
249
+ # not want to accidentally remove it.
250
+ configure_tempdir_registry(self.tempdir_registry)
251
+ raise
252
+
253
+ return wrapper
254
+
255
+
256
+ class RequirementCommand(IndexGroupCommand):
257
+ def __init__(self, *args: Any, **kw: Any) -> None:
258
+ super().__init__(*args, **kw)
259
+
260
+ self.cmd_opts.add_option(cmdoptions.no_clean())
261
+
262
+ @staticmethod
263
+ def determine_resolver_variant(options: Values) -> str:
264
+ """Determines which resolver should be used, based on the given options."""
265
+ if "legacy-resolver" in options.deprecated_features_enabled:
266
+ return "legacy"
267
+
268
+ return "resolvelib"
269
+
270
+ @classmethod
271
+ def make_requirement_preparer(
272
+ cls,
273
+ temp_build_dir: TempDirectory,
274
+ options: Values,
275
+ build_tracker: BuildTracker,
276
+ session: PipSession,
277
+ finder: PackageFinder,
278
+ use_user_site: bool,
279
+ download_dir: Optional[str] = None,
280
+ verbosity: int = 0,
281
+ ) -> RequirementPreparer:
282
+ """
283
+ Create a RequirementPreparer instance for the given parameters.
284
+ """
285
+ temp_build_dir_path = temp_build_dir.path
286
+ assert temp_build_dir_path is not None
287
+ legacy_resolver = False
288
+
289
+ resolver_variant = cls.determine_resolver_variant(options)
290
+ if resolver_variant == "resolvelib":
291
+ lazy_wheel = "fast-deps" in options.features_enabled
292
+ if lazy_wheel:
293
+ logger.warning(
294
+ "pip is using lazily downloaded wheels using HTTP "
295
+ "range requests to obtain dependency information. "
296
+ "This experimental feature is enabled through "
297
+ "--use-feature=fast-deps and it is not ready for "
298
+ "production."
299
+ )
300
+ else:
301
+ legacy_resolver = True
302
+ lazy_wheel = False
303
+ if "fast-deps" in options.features_enabled:
304
+ logger.warning(
305
+ "fast-deps has no effect when used with the legacy resolver."
306
+ )
307
+
308
+ return RequirementPreparer(
309
+ build_dir=temp_build_dir_path,
310
+ src_dir=options.src_dir,
311
+ download_dir=download_dir,
312
+ build_isolation=options.build_isolation,
313
+ check_build_deps=options.check_build_deps,
314
+ build_tracker=build_tracker,
315
+ session=session,
316
+ progress_bar=options.progress_bar,
317
+ finder=finder,
318
+ require_hashes=options.require_hashes,
319
+ use_user_site=use_user_site,
320
+ lazy_wheel=lazy_wheel,
321
+ verbosity=verbosity,
322
+ legacy_resolver=legacy_resolver,
323
+ )
324
+
325
+ @classmethod
326
+ def make_resolver(
327
+ cls,
328
+ preparer: RequirementPreparer,
329
+ finder: PackageFinder,
330
+ options: Values,
331
+ wheel_cache: Optional[WheelCache] = None,
332
+ use_user_site: bool = False,
333
+ ignore_installed: bool = True,
334
+ ignore_requires_python: bool = False,
335
+ force_reinstall: bool = False,
336
+ upgrade_strategy: str = "to-satisfy-only",
337
+ use_pep517: Optional[bool] = None,
338
+ py_version_info: Optional[Tuple[int, ...]] = None,
339
+ ) -> BaseResolver:
340
+ """
341
+ Create a Resolver instance for the given parameters.
342
+ """
343
+ make_install_req = partial(
344
+ install_req_from_req_string,
345
+ isolated=options.isolated_mode,
346
+ use_pep517=use_pep517,
347
+ )
348
+ resolver_variant = cls.determine_resolver_variant(options)
349
+ # The long import name and duplicated invocation is needed to convince
350
+ # Mypy into correctly typechecking. Otherwise it would complain the
351
+ # "Resolver" class being redefined.
352
+ if resolver_variant == "resolvelib":
353
+ import pip._internal.resolution.resolvelib.resolver
354
+
355
+ return pip._internal.resolution.resolvelib.resolver.Resolver(
356
+ preparer=preparer,
357
+ finder=finder,
358
+ wheel_cache=wheel_cache,
359
+ make_install_req=make_install_req,
360
+ use_user_site=use_user_site,
361
+ ignore_dependencies=options.ignore_dependencies,
362
+ ignore_installed=ignore_installed,
363
+ ignore_requires_python=ignore_requires_python,
364
+ force_reinstall=force_reinstall,
365
+ upgrade_strategy=upgrade_strategy,
366
+ py_version_info=py_version_info,
367
+ )
368
+ import pip._internal.resolution.legacy.resolver
369
+
370
+ return pip._internal.resolution.legacy.resolver.Resolver(
371
+ preparer=preparer,
372
+ finder=finder,
373
+ wheel_cache=wheel_cache,
374
+ make_install_req=make_install_req,
375
+ use_user_site=use_user_site,
376
+ ignore_dependencies=options.ignore_dependencies,
377
+ ignore_installed=ignore_installed,
378
+ ignore_requires_python=ignore_requires_python,
379
+ force_reinstall=force_reinstall,
380
+ upgrade_strategy=upgrade_strategy,
381
+ py_version_info=py_version_info,
382
+ )
383
+
384
+ def get_requirements(
385
+ self,
386
+ args: List[str],
387
+ options: Values,
388
+ finder: PackageFinder,
389
+ session: PipSession,
390
+ ) -> List[InstallRequirement]:
391
+ """
392
+ Parse command-line arguments into the corresponding requirements.
393
+ """
394
+ requirements: List[InstallRequirement] = []
395
+ for filename in options.constraints:
396
+ for parsed_req in parse_requirements(
397
+ filename,
398
+ constraint=True,
399
+ finder=finder,
400
+ options=options,
401
+ session=session,
402
+ ):
403
+ req_to_add = install_req_from_parsed_requirement(
404
+ parsed_req,
405
+ isolated=options.isolated_mode,
406
+ user_supplied=False,
407
+ )
408
+ requirements.append(req_to_add)
409
+
410
+ for req in args:
411
+ req_to_add = install_req_from_line(
412
+ req,
413
+ comes_from=None,
414
+ isolated=options.isolated_mode,
415
+ use_pep517=options.use_pep517,
416
+ user_supplied=True,
417
+ config_settings=getattr(options, "config_settings", None),
418
+ )
419
+ requirements.append(req_to_add)
420
+
421
+ for req in options.editables:
422
+ req_to_add = install_req_from_editable(
423
+ req,
424
+ user_supplied=True,
425
+ isolated=options.isolated_mode,
426
+ use_pep517=options.use_pep517,
427
+ config_settings=getattr(options, "config_settings", None),
428
+ )
429
+ requirements.append(req_to_add)
430
+
431
+ # NOTE: options.require_hashes may be set if --require-hashes is True
432
+ for filename in options.requirements:
433
+ for parsed_req in parse_requirements(
434
+ filename, finder=finder, options=options, session=session
435
+ ):
436
+ req_to_add = install_req_from_parsed_requirement(
437
+ parsed_req,
438
+ isolated=options.isolated_mode,
439
+ use_pep517=options.use_pep517,
440
+ user_supplied=True,
441
+ config_settings=parsed_req.options.get("config_settings")
442
+ if parsed_req.options
443
+ else None,
444
+ )
445
+ requirements.append(req_to_add)
446
+
447
+ # If any requirement has hash options, enable hash checking.
448
+ if any(req.has_hash_options for req in requirements):
449
+ options.require_hashes = True
450
+
451
+ if not (args or options.editables or options.requirements):
452
+ opts = {"name": self.name}
453
+ if options.find_links:
454
+ raise CommandError(
455
+ "You must give at least one requirement to {name} "
456
+ '(maybe you meant "pip {name} {links}"?)'.format(
457
+ **dict(opts, links=" ".join(options.find_links))
458
+ )
459
+ )
460
+ else:
461
+ raise CommandError(
462
+ "You must give at least one requirement to {name} "
463
+ '(see "pip help {name}")'.format(**opts)
464
+ )
465
+
466
+ return requirements
467
+
468
+ @staticmethod
469
+ def trace_basic_info(finder: PackageFinder) -> None:
470
+ """
471
+ Trace basic information about the provided objects.
472
+ """
473
+ # Display where finder is looking for packages
474
+ search_scope = finder.search_scope
475
+ locations = search_scope.get_formatted_locations()
476
+ if locations:
477
+ logger.info(locations)
478
+
479
+ def _build_package_finder(
480
+ self,
481
+ options: Values,
482
+ session: PipSession,
483
+ target_python: Optional[TargetPython] = None,
484
+ ignore_requires_python: Optional[bool] = None,
485
+ ) -> PackageFinder:
486
+ """
487
+ Create a package finder appropriate to this requirement command.
488
+
489
+ :param ignore_requires_python: Whether to ignore incompatible
490
+ "Requires-Python" values in links. Defaults to False.
491
+ """
492
+ link_collector = LinkCollector.create(session, options=options)
493
+ selection_prefs = SelectionPreferences(
494
+ allow_yanked=True,
495
+ format_control=options.format_control,
496
+ allow_all_prereleases=options.pre,
497
+ prefer_binary=options.prefer_binary,
498
+ ignore_requires_python=ignore_requires_python,
499
+ )
500
+
501
+ return PackageFinder.create(
502
+ link_collector=link_collector,
503
+ selection_prefs=selection_prefs,
504
+ target_python=target_python,
505
+ )
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/__init__.py ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Package containing all pip commands
3
+ """
4
+
5
+ import importlib
6
+ from collections import namedtuple
7
+ from typing import Any, Dict, Optional
8
+
9
+ from pip._internal.cli.base_command import Command
10
+
11
+ CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")
12
+
13
+ # This dictionary does a bunch of heavy lifting for help output:
14
+ # - Enables avoiding additional (costly) imports for presenting `--help`.
15
+ # - The ordering matters for help display.
16
+ #
17
+ # Even though the module path starts with the same "pip._internal.commands"
18
+ # prefix, the full path makes testing easier (specifically when modifying
19
+ # `commands_dict` in test setup / teardown).
20
+ commands_dict: Dict[str, CommandInfo] = {
21
+ "install": CommandInfo(
22
+ "pip._internal.commands.install",
23
+ "InstallCommand",
24
+ "Install packages.",
25
+ ),
26
+ "download": CommandInfo(
27
+ "pip._internal.commands.download",
28
+ "DownloadCommand",
29
+ "Download packages.",
30
+ ),
31
+ "uninstall": CommandInfo(
32
+ "pip._internal.commands.uninstall",
33
+ "UninstallCommand",
34
+ "Uninstall packages.",
35
+ ),
36
+ "freeze": CommandInfo(
37
+ "pip._internal.commands.freeze",
38
+ "FreezeCommand",
39
+ "Output installed packages in requirements format.",
40
+ ),
41
+ "inspect": CommandInfo(
42
+ "pip._internal.commands.inspect",
43
+ "InspectCommand",
44
+ "Inspect the python environment.",
45
+ ),
46
+ "list": CommandInfo(
47
+ "pip._internal.commands.list",
48
+ "ListCommand",
49
+ "List installed packages.",
50
+ ),
51
+ "show": CommandInfo(
52
+ "pip._internal.commands.show",
53
+ "ShowCommand",
54
+ "Show information about installed packages.",
55
+ ),
56
+ "check": CommandInfo(
57
+ "pip._internal.commands.check",
58
+ "CheckCommand",
59
+ "Verify installed packages have compatible dependencies.",
60
+ ),
61
+ "config": CommandInfo(
62
+ "pip._internal.commands.configuration",
63
+ "ConfigurationCommand",
64
+ "Manage local and global configuration.",
65
+ ),
66
+ "search": CommandInfo(
67
+ "pip._internal.commands.search",
68
+ "SearchCommand",
69
+ "Search PyPI for packages.",
70
+ ),
71
+ "cache": CommandInfo(
72
+ "pip._internal.commands.cache",
73
+ "CacheCommand",
74
+ "Inspect and manage pip's wheel cache.",
75
+ ),
76
+ "index": CommandInfo(
77
+ "pip._internal.commands.index",
78
+ "IndexCommand",
79
+ "Inspect information available from package indexes.",
80
+ ),
81
+ "wheel": CommandInfo(
82
+ "pip._internal.commands.wheel",
83
+ "WheelCommand",
84
+ "Build wheels from your requirements.",
85
+ ),
86
+ "hash": CommandInfo(
87
+ "pip._internal.commands.hash",
88
+ "HashCommand",
89
+ "Compute hashes of package archives.",
90
+ ),
91
+ "completion": CommandInfo(
92
+ "pip._internal.commands.completion",
93
+ "CompletionCommand",
94
+ "A helper command used for command completion.",
95
+ ),
96
+ "debug": CommandInfo(
97
+ "pip._internal.commands.debug",
98
+ "DebugCommand",
99
+ "Show information useful for debugging.",
100
+ ),
101
+ "help": CommandInfo(
102
+ "pip._internal.commands.help",
103
+ "HelpCommand",
104
+ "Show help for commands.",
105
+ ),
106
+ }
107
+
108
+
109
+ def create_command(name: str, **kwargs: Any) -> Command:
110
+ """
111
+ Create an instance of the Command class with the given name.
112
+ """
113
+ module_path, class_name, summary = commands_dict[name]
114
+ module = importlib.import_module(module_path)
115
+ command_class = getattr(module, class_name)
116
+ command = command_class(name=name, summary=summary, **kwargs)
117
+
118
+ return command
119
+
120
+
121
+ def get_similar_commands(name: str) -> Optional[str]:
122
+ """Command name auto-correct."""
123
+ from difflib import get_close_matches
124
+
125
+ name = name.lower()
126
+
127
+ close_commands = get_close_matches(name, commands_dict.keys())
128
+
129
+ if close_commands:
130
+ return close_commands[0]
131
+ else:
132
+ return None
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/debug.py ADDED
@@ -0,0 +1,201 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import importlib.resources
2
+ import locale
3
+ import logging
4
+ import os
5
+ import sys
6
+ from optparse import Values
7
+ from types import ModuleType
8
+ from typing import Any, Dict, List, Optional
9
+
10
+ import pip._vendor
11
+ from pip._vendor.certifi import where
12
+ from pip._vendor.packaging.version import parse as parse_version
13
+
14
+ from pip._internal.cli import cmdoptions
15
+ from pip._internal.cli.base_command import Command
16
+ from pip._internal.cli.cmdoptions import make_target_python
17
+ from pip._internal.cli.status_codes import SUCCESS
18
+ from pip._internal.configuration import Configuration
19
+ from pip._internal.metadata import get_environment
20
+ from pip._internal.utils.logging import indent_log
21
+ from pip._internal.utils.misc import get_pip_version
22
+
23
+ logger = logging.getLogger(__name__)
24
+
25
+
26
+ def show_value(name: str, value: Any) -> None:
27
+ logger.info("%s: %s", name, value)
28
+
29
+
30
+ def show_sys_implementation() -> None:
31
+ logger.info("sys.implementation:")
32
+ implementation_name = sys.implementation.name
33
+ with indent_log():
34
+ show_value("name", implementation_name)
35
+
36
+
37
+ def create_vendor_txt_map() -> Dict[str, str]:
38
+ with importlib.resources.open_text("pip._vendor", "vendor.txt") as f:
39
+ # Purge non version specifying lines.
40
+ # Also, remove any space prefix or suffixes (including comments).
41
+ lines = [
42
+ line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line
43
+ ]
44
+
45
+ # Transform into "module" -> version dict.
46
+ return dict(line.split("==", 1) for line in lines)
47
+
48
+
49
+ def get_module_from_module_name(module_name: str) -> Optional[ModuleType]:
50
+ # Module name can be uppercase in vendor.txt for some reason...
51
+ module_name = module_name.lower().replace("-", "_")
52
+ # PATCH: setuptools is actually only pkg_resources.
53
+ if module_name == "setuptools":
54
+ module_name = "pkg_resources"
55
+
56
+ try:
57
+ __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
58
+ return getattr(pip._vendor, module_name)
59
+ except ImportError:
60
+ # We allow 'truststore' to fail to import due
61
+ # to being unavailable on Python 3.9 and earlier.
62
+ if module_name == "truststore" and sys.version_info < (3, 10):
63
+ return None
64
+ raise
65
+
66
+
67
+ def get_vendor_version_from_module(module_name: str) -> Optional[str]:
68
+ module = get_module_from_module_name(module_name)
69
+ version = getattr(module, "__version__", None)
70
+
71
+ if module and not version:
72
+ # Try to find version in debundled module info.
73
+ assert module.__file__ is not None
74
+ env = get_environment([os.path.dirname(module.__file__)])
75
+ dist = env.get_distribution(module_name)
76
+ if dist:
77
+ version = str(dist.version)
78
+
79
+ return version
80
+
81
+
82
+ def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
83
+ """Log the actual version and print extra info if there is
84
+ a conflict or if the actual version could not be imported.
85
+ """
86
+ for module_name, expected_version in vendor_txt_versions.items():
87
+ extra_message = ""
88
+ actual_version = get_vendor_version_from_module(module_name)
89
+ if not actual_version:
90
+ extra_message = (
91
+ " (Unable to locate actual module version, using"
92
+ " vendor.txt specified version)"
93
+ )
94
+ actual_version = expected_version
95
+ elif parse_version(actual_version) != parse_version(expected_version):
96
+ extra_message = (
97
+ " (CONFLICT: vendor.txt suggests version should"
98
+ f" be {expected_version})"
99
+ )
100
+ logger.info("%s==%s%s", module_name, actual_version, extra_message)
101
+
102
+
103
+ def show_vendor_versions() -> None:
104
+ logger.info("vendored library versions:")
105
+
106
+ vendor_txt_versions = create_vendor_txt_map()
107
+ with indent_log():
108
+ show_actual_vendor_versions(vendor_txt_versions)
109
+
110
+
111
+ def show_tags(options: Values) -> None:
112
+ tag_limit = 10
113
+
114
+ target_python = make_target_python(options)
115
+ tags = target_python.get_sorted_tags()
116
+
117
+ # Display the target options that were explicitly provided.
118
+ formatted_target = target_python.format_given()
119
+ suffix = ""
120
+ if formatted_target:
121
+ suffix = f" (target: {formatted_target})"
122
+
123
+ msg = f"Compatible tags: {len(tags)}{suffix}"
124
+ logger.info(msg)
125
+
126
+ if options.verbose < 1 and len(tags) > tag_limit:
127
+ tags_limited = True
128
+ tags = tags[:tag_limit]
129
+ else:
130
+ tags_limited = False
131
+
132
+ with indent_log():
133
+ for tag in tags:
134
+ logger.info(str(tag))
135
+
136
+ if tags_limited:
137
+ msg = f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
138
+ logger.info(msg)
139
+
140
+
141
+ def ca_bundle_info(config: Configuration) -> str:
142
+ levels = {key.split(".", 1)[0] for key, _ in config.items()}
143
+ if not levels:
144
+ return "Not specified"
145
+
146
+ levels_that_override_global = ["install", "wheel", "download"]
147
+ global_overriding_level = [
148
+ level for level in levels if level in levels_that_override_global
149
+ ]
150
+ if not global_overriding_level:
151
+ return "global"
152
+
153
+ if "global" in levels:
154
+ levels.remove("global")
155
+ return ", ".join(levels)
156
+
157
+
158
+ class DebugCommand(Command):
159
+ """
160
+ Display debug information.
161
+ """
162
+
163
+ usage = """
164
+ %prog <options>"""
165
+ ignore_require_venv = True
166
+
167
+ def add_options(self) -> None:
168
+ cmdoptions.add_target_python_options(self.cmd_opts)
169
+ self.parser.insert_option_group(0, self.cmd_opts)
170
+ self.parser.config.load()
171
+
172
+ def run(self, options: Values, args: List[str]) -> int:
173
+ logger.warning(
174
+ "This command is only meant for debugging. "
175
+ "Do not use this with automation for parsing and getting these "
176
+ "details, since the output and options of this command may "
177
+ "change without notice."
178
+ )
179
+ show_value("pip version", get_pip_version())
180
+ show_value("sys.version", sys.version)
181
+ show_value("sys.executable", sys.executable)
182
+ show_value("sys.getdefaultencoding", sys.getdefaultencoding())
183
+ show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())
184
+ show_value(
185
+ "locale.getpreferredencoding",
186
+ locale.getpreferredencoding(),
187
+ )
188
+ show_value("sys.platform", sys.platform)
189
+ show_sys_implementation()
190
+
191
+ show_value("'cert' config value", ca_bundle_info(self.parser.config))
192
+ show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))
193
+ show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))
194
+ show_value("pip._vendor.certifi.where()", where())
195
+ show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)
196
+
197
+ show_vendor_versions()
198
+
199
+ show_tags(options)
200
+
201
+ return SUCCESS
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/freeze.py ADDED
@@ -0,0 +1,108 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sys
2
+ from optparse import Values
3
+ from typing import AbstractSet, List
4
+
5
+ from pip._internal.cli import cmdoptions
6
+ from pip._internal.cli.base_command import Command
7
+ from pip._internal.cli.status_codes import SUCCESS
8
+ from pip._internal.operations.freeze import freeze
9
+ from pip._internal.utils.compat import stdlib_pkgs
10
+
11
+
12
+ def _should_suppress_build_backends() -> bool:
13
+ return sys.version_info < (3, 12)
14
+
15
+
16
+ def _dev_pkgs() -> AbstractSet[str]:
17
+ pkgs = {"pip"}
18
+
19
+ if _should_suppress_build_backends():
20
+ pkgs |= {"setuptools", "distribute", "wheel"}
21
+
22
+ return pkgs
23
+
24
+
25
+ class FreezeCommand(Command):
26
+ """
27
+ Output installed packages in requirements format.
28
+
29
+ packages are listed in a case-insensitive sorted order.
30
+ """
31
+
32
+ usage = """
33
+ %prog [options]"""
34
+ log_streams = ("ext://sys.stderr", "ext://sys.stderr")
35
+
36
+ def add_options(self) -> None:
37
+ self.cmd_opts.add_option(
38
+ "-r",
39
+ "--requirement",
40
+ dest="requirements",
41
+ action="append",
42
+ default=[],
43
+ metavar="file",
44
+ help=(
45
+ "Use the order in the given requirements file and its "
46
+ "comments when generating output. This option can be "
47
+ "used multiple times."
48
+ ),
49
+ )
50
+ self.cmd_opts.add_option(
51
+ "-l",
52
+ "--local",
53
+ dest="local",
54
+ action="store_true",
55
+ default=False,
56
+ help=(
57
+ "If in a virtualenv that has global access, do not output "
58
+ "globally-installed packages."
59
+ ),
60
+ )
61
+ self.cmd_opts.add_option(
62
+ "--user",
63
+ dest="user",
64
+ action="store_true",
65
+ default=False,
66
+ help="Only output packages installed in user-site.",
67
+ )
68
+ self.cmd_opts.add_option(cmdoptions.list_path())
69
+ self.cmd_opts.add_option(
70
+ "--all",
71
+ dest="freeze_all",
72
+ action="store_true",
73
+ help=(
74
+ "Do not skip these packages in the output:"
75
+ " {}".format(", ".join(_dev_pkgs()))
76
+ ),
77
+ )
78
+ self.cmd_opts.add_option(
79
+ "--exclude-editable",
80
+ dest="exclude_editable",
81
+ action="store_true",
82
+ help="Exclude editable package from output.",
83
+ )
84
+ self.cmd_opts.add_option(cmdoptions.list_exclude())
85
+
86
+ self.parser.insert_option_group(0, self.cmd_opts)
87
+
88
+ def run(self, options: Values, args: List[str]) -> int:
89
+ skip = set(stdlib_pkgs)
90
+ if not options.freeze_all:
91
+ skip.update(_dev_pkgs())
92
+
93
+ if options.excludes:
94
+ skip.update(options.excludes)
95
+
96
+ cmdoptions.check_list_path_option(options)
97
+
98
+ for line in freeze(
99
+ requirement=options.requirements,
100
+ local_only=options.local,
101
+ user_only=options.user,
102
+ paths=options.path,
103
+ isolated=options.isolated_mode,
104
+ skip=skip,
105
+ exclude_editable=options.exclude_editable,
106
+ ):
107
+ sys.stdout.write(line + "\n")
108
+ return SUCCESS
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/hash.py ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import hashlib
2
+ import logging
3
+ import sys
4
+ from optparse import Values
5
+ from typing import List
6
+
7
+ from pip._internal.cli.base_command import Command
8
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
9
+ from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
10
+ from pip._internal.utils.misc import read_chunks, write_output
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
+ class HashCommand(Command):
16
+ """
17
+ Compute a hash of a local package archive.
18
+
19
+ These can be used with --hash in a requirements file to do repeatable
20
+ installs.
21
+ """
22
+
23
+ usage = "%prog [options] <file> ..."
24
+ ignore_require_venv = True
25
+
26
+ def add_options(self) -> None:
27
+ self.cmd_opts.add_option(
28
+ "-a",
29
+ "--algorithm",
30
+ dest="algorithm",
31
+ choices=STRONG_HASHES,
32
+ action="store",
33
+ default=FAVORITE_HASH,
34
+ help="The hash algorithm to use: one of {}".format(
35
+ ", ".join(STRONG_HASHES)
36
+ ),
37
+ )
38
+ self.parser.insert_option_group(0, self.cmd_opts)
39
+
40
+ def run(self, options: Values, args: List[str]) -> int:
41
+ if not args:
42
+ self.parser.print_usage(sys.stderr)
43
+ return ERROR
44
+
45
+ algorithm = options.algorithm
46
+ for path in args:
47
+ write_output(
48
+ "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)
49
+ )
50
+ return SUCCESS
51
+
52
+
53
+ def _hash_of_file(path: str, algorithm: str) -> str:
54
+ """Return the hash digest of a file."""
55
+ with open(path, "rb") as archive:
56
+ hash = hashlib.new(algorithm)
57
+ for chunk in read_chunks(archive):
58
+ hash.update(chunk)
59
+ return hash.hexdigest()
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/index.py ADDED
@@ -0,0 +1,139 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from optparse import Values
3
+ from typing import Any, Iterable, List, Optional, Union
4
+
5
+ from pip._vendor.packaging.version import LegacyVersion, Version
6
+
7
+ from pip._internal.cli import cmdoptions
8
+ from pip._internal.cli.req_command import IndexGroupCommand
9
+ from pip._internal.cli.status_codes import ERROR, SUCCESS
10
+ from pip._internal.commands.search import print_dist_installation_info
11
+ from pip._internal.exceptions import CommandError, DistributionNotFound, PipError
12
+ from pip._internal.index.collector import LinkCollector
13
+ from pip._internal.index.package_finder import PackageFinder
14
+ from pip._internal.models.selection_prefs import SelectionPreferences
15
+ from pip._internal.models.target_python import TargetPython
16
+ from pip._internal.network.session import PipSession
17
+ from pip._internal.utils.misc import write_output
18
+
19
+ logger = logging.getLogger(__name__)
20
+
21
+
22
+ class IndexCommand(IndexGroupCommand):
23
+ """
24
+ Inspect information available from package indexes.
25
+ """
26
+
27
+ ignore_require_venv = True
28
+ usage = """
29
+ %prog versions <package>
30
+ """
31
+
32
+ def add_options(self) -> None:
33
+ cmdoptions.add_target_python_options(self.cmd_opts)
34
+
35
+ self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
36
+ self.cmd_opts.add_option(cmdoptions.pre())
37
+ self.cmd_opts.add_option(cmdoptions.no_binary())
38
+ self.cmd_opts.add_option(cmdoptions.only_binary())
39
+
40
+ index_opts = cmdoptions.make_option_group(
41
+ cmdoptions.index_group,
42
+ self.parser,
43
+ )
44
+
45
+ self.parser.insert_option_group(0, index_opts)
46
+ self.parser.insert_option_group(0, self.cmd_opts)
47
+
48
+ def run(self, options: Values, args: List[str]) -> int:
49
+ handlers = {
50
+ "versions": self.get_available_package_versions,
51
+ }
52
+
53
+ logger.warning(
54
+ "pip index is currently an experimental command. "
55
+ "It may be removed/changed in a future release "
56
+ "without prior warning."
57
+ )
58
+
59
+ # Determine action
60
+ if not args or args[0] not in handlers:
61
+ logger.error(
62
+ "Need an action (%s) to perform.",
63
+ ", ".join(sorted(handlers)),
64
+ )
65
+ return ERROR
66
+
67
+ action = args[0]
68
+
69
+ # Error handling happens here, not in the action-handlers.
70
+ try:
71
+ handlers[action](options, args[1:])
72
+ except PipError as e:
73
+ logger.error(e.args[0])
74
+ return ERROR
75
+
76
+ return SUCCESS
77
+
78
+ def _build_package_finder(
79
+ self,
80
+ options: Values,
81
+ session: PipSession,
82
+ target_python: Optional[TargetPython] = None,
83
+ ignore_requires_python: Optional[bool] = None,
84
+ ) -> PackageFinder:
85
+ """
86
+ Create a package finder appropriate to the index command.
87
+ """
88
+ link_collector = LinkCollector.create(session, options=options)
89
+
90
+ # Pass allow_yanked=False to ignore yanked versions.
91
+ selection_prefs = SelectionPreferences(
92
+ allow_yanked=False,
93
+ allow_all_prereleases=options.pre,
94
+ ignore_requires_python=ignore_requires_python,
95
+ )
96
+
97
+ return PackageFinder.create(
98
+ link_collector=link_collector,
99
+ selection_prefs=selection_prefs,
100
+ target_python=target_python,
101
+ )
102
+
103
+ def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
104
+ if len(args) != 1:
105
+ raise CommandError("You need to specify exactly one argument")
106
+
107
+ target_python = cmdoptions.make_target_python(options)
108
+ query = args[0]
109
+
110
+ with self._build_session(options) as session:
111
+ finder = self._build_package_finder(
112
+ options=options,
113
+ session=session,
114
+ target_python=target_python,
115
+ ignore_requires_python=options.ignore_requires_python,
116
+ )
117
+
118
+ versions: Iterable[Union[LegacyVersion, Version]] = (
119
+ candidate.version for candidate in finder.find_all_candidates(query)
120
+ )
121
+
122
+ if not options.pre:
123
+ # Remove prereleases
124
+ versions = (
125
+ version for version in versions if not version.is_prerelease
126
+ )
127
+ versions = set(versions)
128
+
129
+ if not versions:
130
+ raise DistributionNotFound(
131
+ f"No matching distribution found for {query}"
132
+ )
133
+
134
+ formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
135
+ latest = formatted_versions[0]
136
+
137
+ write_output(f"{query} ({latest})")
138
+ write_output("Available versions: {}".format(", ".join(formatted_versions)))
139
+ print_dist_installation_info(query, latest)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/commands/inspect.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ from optparse import Values
3
+ from typing import Any, Dict, List
4
+
5
+ from pip._vendor.packaging.markers import default_environment
6
+ from pip._vendor.rich import print_json
7
+
8
+ from pip import __version__
9
+ from pip._internal.cli import cmdoptions
10
+ from pip._internal.cli.req_command import Command
11
+ from pip._internal.cli.status_codes import SUCCESS
12
+ from pip._internal.metadata import BaseDistribution, get_environment
13
+ from pip._internal.utils.compat import stdlib_pkgs
14
+ from pip._internal.utils.urls import path_to_url
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
+
19
+ class InspectCommand(Command):
20
+ """
21
+ Inspect the content of a Python environment and produce a report in JSON format.
22
+ """
23
+
24
+ ignore_require_venv = True
25
+ usage = """
26
+ %prog [options]"""
27
+
28
+ def add_options(self) -> None:
29
+ self.cmd_opts.add_option(
30
+ "--local",
31
+ action="store_true",
32
+ default=False,
33
+ help=(
34
+ "If in a virtualenv that has global access, do not list "
35
+ "globally-installed packages."
36
+ ),
37
+ )
38
+ self.cmd_opts.add_option(
39
+ "--user",
40
+ dest="user",
41
+ action="store_true",
42
+ default=False,
43
+ help="Only output packages installed in user-site.",
44
+ )
45
+ self.cmd_opts.add_option(cmdoptions.list_path())
46
+ self.parser.insert_option_group(0, self.cmd_opts)
47
+
48
+ def run(self, options: Values, args: List[str]) -> int:
49
+ cmdoptions.check_list_path_option(options)
50
+ dists = get_environment(options.path).iter_installed_distributions(
51
+ local_only=options.local,
52
+ user_only=options.user,
53
+ skip=set(stdlib_pkgs),
54
+ )
55
+ output = {
56
+ "version": "1",
57
+ "pip_version": __version__,
58
+ "installed": [self._dist_to_dict(dist) for dist in dists],
59
+ "environment": default_environment(),
60
+ # TODO tags? scheme?
61
+ }
62
+ print_json(data=output)
63
+ return SUCCESS
64
+
65
+ def _dist_to_dict(self, dist: BaseDistribution) -> Dict[str, Any]:
66
+ res: Dict[str, Any] = {
67
+ "metadata": dist.metadata_dict,
68
+ "metadata_location": dist.info_location,
69
+ }
70
+ # direct_url. Note that we don't have download_info (as in the installation
71
+ # report) since it is not recorded in installed metadata.
72
+ direct_url = dist.direct_url
73
+ if direct_url is not None:
74
+ res["direct_url"] = direct_url.to_dict()
75
+ else:
76
+ # Emulate direct_url for legacy editable installs.
77
+ editable_project_location = dist.editable_project_location
78
+ if editable_project_location is not None:
79
+ res["direct_url"] = {
80
+ "url": path_to_url(editable_project_location),
81
+ "dir_info": {
82
+ "editable": True,
83
+ },
84
+ }
85
+ # installer
86
+ installer = dist.installer
87
+ if dist.installer:
88
+ res["installer"] = installer
89
+ # requested
90
+ if dist.installed_with_dist_info:
91
+ res["requested"] = dist.requested
92
+ return res
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/locations/__init__.py ADDED
@@ -0,0 +1,467 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import logging
3
+ import os
4
+ import pathlib
5
+ import sys
6
+ import sysconfig
7
+ from typing import Any, Dict, Generator, Optional, Tuple
8
+
9
+ from pip._internal.models.scheme import SCHEME_KEYS, Scheme
10
+ from pip._internal.utils.compat import WINDOWS
11
+ from pip._internal.utils.deprecation import deprecated
12
+ from pip._internal.utils.virtualenv import running_under_virtualenv
13
+
14
+ from . import _sysconfig
15
+ from .base import (
16
+ USER_CACHE_DIR,
17
+ get_major_minor_version,
18
+ get_src_prefix,
19
+ is_osx_framework,
20
+ site_packages,
21
+ user_site,
22
+ )
23
+
24
+ __all__ = [
25
+ "USER_CACHE_DIR",
26
+ "get_bin_prefix",
27
+ "get_bin_user",
28
+ "get_major_minor_version",
29
+ "get_platlib",
30
+ "get_purelib",
31
+ "get_scheme",
32
+ "get_src_prefix",
33
+ "site_packages",
34
+ "user_site",
35
+ ]
36
+
37
+
38
+ logger = logging.getLogger(__name__)
39
+
40
+
41
+ _PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")
42
+
43
+ _USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
44
+
45
+
46
+ def _should_use_sysconfig() -> bool:
47
+ """This function determines the value of _USE_SYSCONFIG.
48
+
49
+ By default, pip uses sysconfig on Python 3.10+.
50
+ But Python distributors can override this decision by setting:
51
+ sysconfig._PIP_USE_SYSCONFIG = True / False
52
+ Rationale in https://github.com/pypa/pip/issues/10647
53
+
54
+ This is a function for testability, but should be constant during any one
55
+ run.
56
+ """
57
+ return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
58
+
59
+
60
+ _USE_SYSCONFIG = _should_use_sysconfig()
61
+
62
+ if not _USE_SYSCONFIG:
63
+ # Import distutils lazily to avoid deprecation warnings,
64
+ # but import it soon enough that it is in memory and available during
65
+ # a pip reinstall.
66
+ from . import _distutils
67
+
68
+ # Be noisy about incompatibilities if this platforms "should" be using
69
+ # sysconfig, but is explicitly opting out and using distutils instead.
70
+ if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
71
+ _MISMATCH_LEVEL = logging.WARNING
72
+ else:
73
+ _MISMATCH_LEVEL = logging.DEBUG
74
+
75
+
76
+ def _looks_like_bpo_44860() -> bool:
77
+ """The resolution to bpo-44860 will change this incorrect platlib.
78
+
79
+ See <https://bugs.python.org/issue44860>.
80
+ """
81
+ from distutils.command.install import INSTALL_SCHEMES
82
+
83
+ try:
84
+ unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]
85
+ except KeyError:
86
+ return False
87
+ return unix_user_platlib == "$usersite"
88
+
89
+
90
+ def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
91
+ platlib = scheme["platlib"]
92
+ if "/$platlibdir/" in platlib:
93
+ platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
94
+ if "/lib64/" not in platlib:
95
+ return False
96
+ unpatched = platlib.replace("/lib64/", "/lib/")
97
+ return unpatched.replace("$platbase/", "$base/") == scheme["purelib"]
98
+
99
+
100
+ @functools.lru_cache(maxsize=None)
101
+ def _looks_like_red_hat_lib() -> bool:
102
+ """Red Hat patches platlib in unix_prefix and unix_home, but not purelib.
103
+
104
+ This is the only way I can see to tell a Red Hat-patched Python.
105
+ """
106
+ from distutils.command.install import INSTALL_SCHEMES
107
+
108
+ return all(
109
+ k in INSTALL_SCHEMES
110
+ and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k])
111
+ for k in ("unix_prefix", "unix_home")
112
+ )
113
+
114
+
115
+ @functools.lru_cache(maxsize=None)
116
+ def _looks_like_debian_scheme() -> bool:
117
+ """Debian adds two additional schemes."""
118
+ from distutils.command.install import INSTALL_SCHEMES
119
+
120
+ return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
121
+
122
+
123
+ @functools.lru_cache(maxsize=None)
124
+ def _looks_like_red_hat_scheme() -> bool:
125
+ """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.
126
+
127
+ Red Hat's ``00251-change-user-install-location.patch`` changes the install
128
+ command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
129
+ (fortunately?) done quite unconditionally, so we create a default command
130
+ object without any configuration to detect this.
131
+ """
132
+ from distutils.command.install import install
133
+ from distutils.dist import Distribution
134
+
135
+ cmd: Any = install(Distribution())
136
+ cmd.finalize_options()
137
+ return (
138
+ cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local"
139
+ and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local"
140
+ )
141
+
142
+
143
+ @functools.lru_cache(maxsize=None)
144
+ def _looks_like_slackware_scheme() -> bool:
145
+ """Slackware patches sysconfig but fails to patch distutils and site.
146
+
147
+ Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
148
+ path, but does not do the same to the site module.
149
+ """
150
+ if user_site is None: # User-site not available.
151
+ return False
152
+ try:
153
+ paths = sysconfig.get_paths(scheme="posix_user", expand=False)
154
+ except KeyError: # User-site not available.
155
+ return False
156
+ return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site
157
+
158
+
159
+ @functools.lru_cache(maxsize=None)
160
+ def _looks_like_msys2_mingw_scheme() -> bool:
161
+ """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
162
+
163
+ However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is
164
+ likely going to be included in their 3.10 release, so we ignore the warning.
165
+ See msys2/MINGW-packages#9319.
166
+
167
+ MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase,
168
+ and is missing the final ``"site-packages"``.
169
+ """
170
+ paths = sysconfig.get_paths("nt", expand=False)
171
+ return all(
172
+ "Lib" not in p and "lib" in p and not p.endswith("site-packages")
173
+ for p in (paths[key] for key in ("platlib", "purelib"))
174
+ )
175
+
176
+
177
+ def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]:
178
+ ldversion = sysconfig.get_config_var("LDVERSION")
179
+ abiflags = getattr(sys, "abiflags", None)
180
+
181
+ # LDVERSION does not end with sys.abiflags. Just return the path unchanged.
182
+ if not ldversion or not abiflags or not ldversion.endswith(abiflags):
183
+ yield from parts
184
+ return
185
+
186
+ # Strip sys.abiflags from LDVERSION-based path components.
187
+ for part in parts:
188
+ if part.endswith(ldversion):
189
+ part = part[: (0 - len(abiflags))]
190
+ yield part
191
+
192
+
193
@functools.lru_cache(maxsize=None)
def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
    """Log one distutils/sysconfig path mismatch, deduplicated via lru_cache."""
    issue_url = "https://github.com/pypa/pip/issues/10151"
    message = "\n".join(
        [
            "Value for %s does not match. Please report this to <%s>",
            "distutils: %s",
            "sysconfig: %s",
        ]
    )
    logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new)
202
+
203
+
204
def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
    """Warn when the two paths differ; return whether a warning was emitted."""
    if old != new:
        _warn_mismatched(old, new, key=key)
        return True
    return False
209
+
210
+
211
@functools.lru_cache(maxsize=None)
def _log_context(
    *,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    prefix: Optional[str] = None,
) -> None:
    """Log the scheme arguments that accompanied a mismatch warning.

    Deduplicated via lru_cache so each argument combination logs only once.
    """
    message = "\n".join(
        [
            "Additional context:",
            "user = %r",
            "home = %r",
            "root = %r",
            "prefix = %r",
        ]
    )
    logger.log(_MISMATCH_LEVEL, message, user, home, root, prefix)
228
+
229
+
230
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> Scheme:
    """Compute the installation scheme for ``dist_name``.

    The scheme is computed via the sysconfig backend, and — when sysconfig is
    not the configured backend — also via distutils. In the latter case the
    two results are compared key-by-key, known benign platform discrepancies
    are skipped, any remaining mismatches are logged so users can report
    them, and the distutils result is returned for backward compatibility.

    :param dist_name: Name of the package being installed (used for headers).
    :param user: Use the "user" scheme.
    :param home: Use the "home" scheme rooted at this directory.
    :param root: Directory under which the scheme's paths are re-based.
    :param isolated: Ignore per-user configuration (distutils compatibility).
    :param prefix: Use the "prefix" scheme rooted at this directory.
    """
    new = _sysconfig.get_scheme(
        dist_name,
        user=user,
        home=home,
        root=root,
        isolated=isolated,
        prefix=prefix,
    )
    if _USE_SYSCONFIG:
        return new

    old = _distutils.get_scheme(
        dist_name,
        user=user,
        home=home,
        root=root,
        isolated=isolated,
        prefix=prefix,
    )

    # Collect (old, new, key) triples for genuine mismatches; each special
    # case below suppresses a known, explainable discrepancy.
    warning_contexts = []
    for k in SCHEME_KEYS:
        old_v = pathlib.Path(getattr(old, k))
        new_v = pathlib.Path(getattr(new, k))

        if old_v == new_v:
            continue

        # distutils incorrectly put PyPy packages under ``site-packages/python``
        # in the ``posix_home`` scheme, but PyPy devs said they expect the
        # directory name to be ``pypy`` instead. So we treat this as a bug fix
        # and not warn about it. See bpo-43307 and python/cpython#24628.
        skip_pypy_special_case = (
            sys.implementation.name == "pypy"
            and home is not None
            and k in ("platlib", "purelib")
            and old_v.parent == new_v.parent
            and old_v.name.startswith("python")
            and new_v.name.startswith("pypy")
        )
        if skip_pypy_special_case:
            continue

        # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in
        # the ``include`` value, but distutils's ``headers`` does. We'll let
        # CPython decide whether this is a bug or feature. See bpo-43948.
        skip_osx_framework_user_special_case = (
            user
            and is_osx_framework()
            and k == "headers"
            and old_v.parent.parent == new_v.parent
            and old_v.parent.name.startswith("python")
        )
        if skip_osx_framework_user_special_case:
            continue

        # On Red Hat and derived Linux distributions, distutils is patched to
        # use "lib64" instead of "lib" for platlib.
        if k == "platlib" and _looks_like_red_hat_lib():
            continue

        # On Python 3.9+, sysconfig's posix_user scheme sets platlib against
        # sys.platlibdir, but distutils's unix_user incorrectly continues
        # using the same $usersite for both platlib and purelib. This creates a
        # mismatch when sys.platlibdir is not "lib".
        skip_bpo_44860 = (
            user
            and k == "platlib"
            and not WINDOWS
            and sys.version_info >= (3, 9)
            and _PLATLIBDIR != "lib"
            and _looks_like_bpo_44860()
        )
        if skip_bpo_44860:
            continue

        # Slackware incorrectly patches posix_user to use lib64 instead of lib,
        # but not usersite to match the location.
        skip_slackware_user_scheme = (
            user
            and k in ("platlib", "purelib")
            and not WINDOWS
            and _looks_like_slackware_scheme()
        )
        if skip_slackware_user_scheme:
            continue

        # Both Debian and Red Hat patch Python to place the system site under
        # /usr/local instead of /usr. Debian also places lib in dist-packages
        # instead of site-packages, but the /usr/local check should cover it.
        skip_linux_system_special_case = (
            not (user or home or prefix or running_under_virtualenv())
            and old_v.parts[1:3] == ("usr", "local")
            and len(new_v.parts) > 1
            and new_v.parts[1] == "usr"
            and (len(new_v.parts) < 3 or new_v.parts[2] != "local")
            and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())
        )
        if skip_linux_system_special_case:
            continue

        # On Python 3.7 and earlier, sysconfig does not include sys.abiflags in
        # the "pythonX.Y" part of the path, but distutils does.
        skip_sysconfig_abiflag_bug = (
            sys.version_info < (3, 8)
            and not WINDOWS
            and k in ("headers", "platlib", "purelib")
            and tuple(_fix_abiflags(old_v.parts)) == new_v.parts
        )
        if skip_sysconfig_abiflag_bug:
            continue

        # MSYS2 MINGW's sysconfig patch does not include the "site-packages"
        # part of the path. This is incorrect and will be fixed in MSYS.
        skip_msys2_mingw_bug = (
            WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()
        )
        if skip_msys2_mingw_bug:
            continue

        # CPython's POSIX install script invokes pip (via ensurepip) against the
        # interpreter located in the source tree, not the install site. This
        # triggers special logic in sysconfig that's not present in distutils.
        # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
        skip_cpython_build = (
            sysconfig.is_python_build(check_home=True)
            and not WINDOWS
            and k in ("headers", "include", "platinclude")
        )
        if skip_cpython_build:
            continue

        warning_contexts.append((old_v, new_v, f"scheme.{k}"))

    if not warning_contexts:
        return old

    # Check if this path mismatch is caused by distutils config files. Those
    # files will no longer work once we switch to sysconfig, so this raises a
    # deprecation message for them.
    default_old = _distutils.distutils_scheme(
        dist_name,
        user,
        home,
        root,
        isolated,
        prefix,
        ignore_config_files=True,
    )
    if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
        deprecated(
            reason=(
                "Configuring installation scheme with distutils config files "
                "is deprecated and will no longer work in the near future. If you "
                "are using a Homebrew or Linuxbrew Python, please see discussion "
                "at https://github.com/Homebrew/homebrew-core/issues/76621"
            ),
            replacement=None,
            gone_in=None,
        )
        return old

    # Post warnings about this mismatch so user can report them back.
    for old_v, new_v, key in warning_contexts:
        _warn_mismatched(old_v, new_v, key=key)
    _log_context(user=user, home=home, root=root, prefix=prefix)

    return old
406
+
407
+
408
def get_bin_prefix() -> str:
    """Return the default scripts directory, warning on backend mismatch."""
    new = _sysconfig.get_bin_prefix()
    if _USE_SYSCONFIG:
        return new

    old = _distutils.get_bin_prefix()
    mismatched = _warn_if_mismatch(
        pathlib.Path(old), pathlib.Path(new), key="bin_prefix"
    )
    if mismatched:
        _log_context()
    return old
417
+
418
+
419
def get_bin_user() -> str:
    """Return the scripts directory of the sysconfig user scheme."""
    user_scheme = _sysconfig.get_scheme("", user=True)
    return user_scheme.scripts
421
+
422
+
423
def _looks_like_deb_system_dist_packages(value: str) -> bool:
    """Check if the value is Debian's APT-controlled dist-packages.

    Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns
    the default package path controlled by APT, but does not patch
    ``sysconfig`` to do the same. This is similar to the bug worked around in
    ``get_scheme()``, but here the default is ``deb_system`` instead of
    ``unix_local``. Ultimately we can't do anything about this Debian bug,
    and this detection allows us to skip the warning when needed.
    """
    return _looks_like_debian_scheme() and value == "/usr/lib/python3/dist-packages"
438
+
439
+
440
def get_purelib() -> str:
    """Return the default pure-Python lib location."""
    sysconfig_purelib = _sysconfig.get_purelib()
    if _USE_SYSCONFIG:
        return sysconfig_purelib

    distutils_purelib = _distutils.get_purelib()
    # Debian's APT-managed dist-packages mismatch is a known Debian bug;
    # skip the mismatch warning for it.
    if not _looks_like_deb_system_dist_packages(distutils_purelib):
        mismatched = _warn_if_mismatch(
            pathlib.Path(distutils_purelib),
            pathlib.Path(sysconfig_purelib),
            key="purelib",
        )
        if mismatched:
            _log_context()
    return distutils_purelib
452
+
453
+
454
def get_platlib() -> str:
    """Return the default platform-shared lib location."""
    sysconfig_platlib = _sysconfig.get_platlib()
    if _USE_SYSCONFIG:
        return sysconfig_platlib

    from . import _distutils

    distutils_platlib = _distutils.get_platlib()
    # Debian's APT-managed dist-packages mismatch is a known Debian bug;
    # skip the mismatch warning for it.
    if not _looks_like_deb_system_dist_packages(distutils_platlib):
        mismatched = _warn_if_mismatch(
            pathlib.Path(distutils_platlib),
            pathlib.Path(sysconfig_platlib),
            key="platlib",
        )
        if mismatched:
            _log_context()
    return distutils_platlib
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/locations/_sysconfig.py ADDED
@@ -0,0 +1,213 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ import sys
4
+ import sysconfig
5
+ import typing
6
+
7
+ from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid
8
+ from pip._internal.models.scheme import SCHEME_KEYS, Scheme
9
+ from pip._internal.utils.virtualenv import running_under_virtualenv
10
+
11
+ from .base import change_root, get_major_minor_version, is_osx_framework
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
+ # Notes on _infer_* functions.
17
+ # Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no
18
+ # way to ask things like "what is the '_prefix' scheme on this platform". These
19
+ # functions try to answer that with some heuristics while accounting for ad-hoc
20
+ # platforms not covered by CPython's default sysconfig implementation. If the
21
+ # ad-hoc implementation does not fully implement sysconfig, we'll fall back to
22
+ # a POSIX scheme.
23
+
24
+ _AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())
25
+
26
+ _PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None)
27
+
28
+
29
def _should_use_osx_framework_prefix() -> bool:
    """Check for Apple's ``osx_framework_library`` scheme.

    Python distributed by Apple's Command Line Tools has this special scheme
    that's used when:

    * This is a framework build.
    * We are installing into the system prefix.

    This does not account for ``pip install --prefix`` (also means we're not
    installing to the system prefix), which should use ``posix_prefix``, but
    logic here means ``_infer_prefix()`` outputs ``osx_framework_library``.
    Since ``prefix`` is not available to ``sysconfig.get_default_scheme()``
    (the stdlib replacement for ``_infer_prefix()``), presumably Apple
    wouldn't be able to magically switch between ``osx_framework_library``
    and ``posix_prefix`` either. Returning ``osx_framework_library`` keeps
    behavior consistent whether we use the stdlib implementation or our own,
    and the ``--prefix`` special case is handled in ``get_scheme()`` instead.
    """
    if "osx_framework_library" not in _AVAILABLE_SCHEMES:
        return False
    if running_under_virtualenv():
        return False
    return is_osx_framework()
53
+
54
+
55
def _infer_prefix() -> str:
    """Try to find a prefix scheme for the current platform.

    This tries:

    * ``sysconfig.get_preferred_scheme`` when the stdlib provides it.
    * A special ``osx_framework_library`` for Python distributed by Apple's
      Command Line Tools, when not running in a virtual environment.
    * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
    * Implementation without OS, used by PyPy on POSIX (``pypy``).
    * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
    * Just the OS name, used by CPython on Windows (``nt``).

    If none of the above works, fall back to ``posix_prefix``.
    """
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("prefix")
    if _should_use_osx_framework_prefix():
        return "osx_framework_library"
    candidates = [
        f"{sys.implementation.name}_{os.name}",  # e.g. pypy_nt
        sys.implementation.name,  # e.g. pypy
        f"{os.name}_prefix",  # e.g. posix_prefix
        os.name,  # On Windows, the prefix scheme is just called "nt".
    ]
    for candidate in candidates:
        if candidate in _AVAILABLE_SCHEMES:
            return candidate
    return "posix_prefix"
84
+
85
+
86
def _infer_user() -> str:
    """Try to find a user scheme for the current platform."""
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("user")
    # Framework builds outside a virtualenv use Apple's dedicated user scheme.
    if is_osx_framework() and not running_under_virtualenv():
        candidate = "osx_framework_user"
    else:
        candidate = f"{os.name}_user"
    if candidate in _AVAILABLE_SCHEMES:
        return candidate
    if "posix_user" not in _AVAILABLE_SCHEMES:  # User scheme unavailable.
        raise UserInstallationInvalid()
    return "posix_user"
99
+
100
+
101
def _infer_home() -> str:
    """Try to find a home scheme for the current platform."""
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("home")
    candidate = f"{os.name}_home"
    return candidate if candidate in _AVAILABLE_SCHEMES else "posix_home"
109
+
110
+
111
# Update these keys if the user sets a custom home.
# These are the sysconfig path-expansion variables that get overridden with
# the user-provided directory in get_scheme() for the "home"/"prefix" cases.
_HOME_KEYS = [
    "installed_base",
    "base",
    "installed_platbase",
    "platbase",
    "prefix",
    "exec_prefix",
]
# "userbase" is only a valid expansion variable when the interpreter reports
# one via sysconfig; only include it then.
if sysconfig.get_config_var("userbase") is not None:
    _HOME_KEYS.append("userbase")
122
+
123
+
124
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: typing.Optional[str] = None,
    root: typing.Optional[str] = None,
    isolated: bool = False,
    prefix: typing.Optional[str] = None,
) -> Scheme:
    """
    Get the "scheme" corresponding to the input parameters.

    :param dist_name: the name of the package to retrieve the scheme for, used
        in the headers scheme path
    :param user: indicates to use the "user" scheme
    :param home: indicates to use the "home" scheme
    :param root: root under which other directories are re-based
    :param isolated: ignored, but kept for distutils compatibility (where
        this controls whether the user-site pydistutils.cfg is honored)
    :param prefix: indicates to use the "prefix" scheme and provides the
        base directory for the same
    :raises InvalidSchemeCombination: when mutually exclusive options
        (``--user``/``--prefix`` or ``--home``/``--prefix``) are combined
    """
    if user and prefix:
        raise InvalidSchemeCombination("--user", "--prefix")
    if home and prefix:
        raise InvalidSchemeCombination("--home", "--prefix")

    # Pick the sysconfig scheme name: home > user > prefix/default.
    if home is not None:
        scheme_name = _infer_home()
    elif user:
        scheme_name = _infer_user()
    else:
        scheme_name = _infer_prefix()

    # Special case: When installing into a custom prefix, use posix_prefix
    # instead of osx_framework_library. See _should_use_osx_framework_prefix()
    # docstring for details.
    if prefix is not None and scheme_name == "osx_framework_library":
        scheme_name = "posix_prefix"

    # Re-anchor every base-directory variable at the user-supplied location.
    if home is not None:
        variables = {k: home for k in _HOME_KEYS}
    elif prefix is not None:
        variables = {k: prefix for k in _HOME_KEYS}
    else:
        variables = {}

    paths = sysconfig.get_paths(scheme=scheme_name, vars=variables)

    # Logic here is very arbitrary, we're doing it for compatibility, don't ask.
    # 1. Pip historically uses a special header path in virtual environments.
    # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We
    #    only do the same when not running in a virtual environment because
    #    pip's historical header path logic (see point 1) did not do this.
    if running_under_virtualenv():
        if user:
            base = variables.get("userbase", sys.prefix)
        else:
            base = variables.get("base", sys.prefix)
        python_xy = f"python{get_major_minor_version()}"
        paths["include"] = os.path.join(base, "include", "site", python_xy)
    elif not dist_name:
        dist_name = "UNKNOWN"

    scheme = Scheme(
        platlib=paths["platlib"],
        purelib=paths["purelib"],
        headers=os.path.join(paths["include"], dist_name),
        scripts=paths["scripts"],
        data=paths["data"],
    )
    # Re-base every path under the requested root directory, if any.
    if root is not None:
        for key in SCHEME_KEYS:
            value = change_root(root, getattr(scheme, key))
            setattr(scheme, key, value)
    return scheme
199
+
200
+
201
def get_bin_prefix() -> str:
    """Return the directory where scripts are installed by default."""
    # Forcing to use /usr/local/bin for standard macOS framework installs
    # (interpreters living under the read-only /System/Library prefix).
    is_darwin = sys.platform[:6] == "darwin"
    is_system_framework = sys.prefix[:16] == "/System/Library/"
    if is_darwin and is_system_framework:
        return "/usr/local/bin"
    return sysconfig.get_paths()["scripts"]
206
+
207
+
208
def get_purelib() -> str:
    """Return the sysconfig purelib path for the default scheme."""
    paths = sysconfig.get_paths()
    return paths["purelib"]
210
+
211
+
212
def get_platlib() -> str:
    """Return the sysconfig platlib path for the default scheme."""
    paths = sysconfig.get_paths()
    return paths["platlib"]
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/__init__.py ADDED
@@ -0,0 +1,128 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+ import functools
3
+ import os
4
+ import sys
5
+ from typing import TYPE_CHECKING, List, Optional, Type, cast
6
+
7
+ from pip._internal.utils.misc import strtobool
8
+
9
+ from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
10
+
11
+ if TYPE_CHECKING:
12
+ from typing import Literal, Protocol
13
+ else:
14
+ Protocol = object
15
+
16
+ __all__ = [
17
+ "BaseDistribution",
18
+ "BaseEnvironment",
19
+ "FilesystemWheel",
20
+ "MemoryWheel",
21
+ "Wheel",
22
+ "get_default_environment",
23
+ "get_environment",
24
+ "get_wheel_distribution",
25
+ "select_backend",
26
+ ]
27
+
28
+
29
def _should_use_importlib_metadata() -> bool:
    """Whether to use the ``importlib.metadata`` or ``pkg_resources`` backend.

    By default, pip uses ``importlib.metadata`` on Python 3.11+, and
    ``pkg_resources`` otherwise. Two overrides exist:

    * If the environment variable ``_PIP_USE_IMPORTLIB_METADATA`` is set to a
      recognised boolean string, it dictates whether ``importlib.metadata``
      is used, regardless of Python version.
    * On Python 3.11+, Python distributors can patch ``importlib.metadata``
      to add a global constant ``_PIP_USE_IMPORTLIB_METADATA = False``. This
      makes pip use ``pkg_resources`` (unless the user set the aforementioned
      environment variable to *True*).
    """
    try:
        return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"]))
    except (KeyError, ValueError):
        # Variable unset, or not a recognised boolean string; fall through.
        pass
    if sys.version_info < (3, 11):
        return False
    import importlib.metadata

    return bool(getattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA", True))
50
+
51
+
52
class Backend(Protocol):
    """Structural type of a metadata backend module.

    Modules returned by ``select_backend()`` expose these attributes; the
    string annotation keeps ``Literal`` lazily evaluated since it is only
    imported under ``TYPE_CHECKING``.
    """

    # Short identifier of the backend module.
    NAME: 'Literal["importlib", "pkg_resources"]'
    # Concrete Distribution class provided by the backend.
    Distribution: Type[BaseDistribution]
    # Concrete Environment class provided by the backend.
    Environment: Type[BaseEnvironment]
56
+
57
+
58
@functools.lru_cache(maxsize=None)
def select_backend() -> Backend:
    """Return the metadata backend module to use, computed once and cached."""
    if not _should_use_importlib_metadata():
        from . import pkg_resources

        return cast(Backend, pkg_resources)
    from . import importlib

    return cast(Backend, importlib)
67
+
68
+
69
def get_default_environment() -> BaseEnvironment:
    """Get the default representation for the current environment.

    This returns an Environment instance from the chosen backend. The default
    Environment instance should be built from ``sys.path`` and may use caching
    to share instance state across calls.
    """
    backend = select_backend()
    return backend.Environment.default()
77
+
78
+
79
def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
    """Get a representation of the environment specified by ``paths``.

    This returns an Environment instance from the chosen backend based on the
    given import paths. The backend must build a fresh instance representing
    the state of installed distributions when this function is called.
    """
    backend = select_backend()
    return backend.Environment.from_paths(paths)
87
+
88
+
89
def get_directory_distribution(directory: str) -> BaseDistribution:
    """Get the distribution metadata representation in the specified directory.

    This returns a Distribution instance from the chosen backend based on
    the given on-disk ``.dist-info`` directory.
    """
    backend = select_backend()
    return backend.Distribution.from_directory(directory)
96
+
97
+
98
def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution:
    """Get the representation of the specified wheel's distribution metadata.

    This returns a Distribution instance from the chosen backend based on
    the given wheel's ``.dist-info`` directory.

    :param canonical_name: Normalized project name of the given wheel.
    """
    backend = select_backend()
    return backend.Distribution.from_wheel(wheel, canonical_name)
107
+
108
+
109
def get_metadata_distribution(
    metadata_contents: bytes,
    filename: str,
    canonical_name: str,
) -> BaseDistribution:
    """Get the dist representation of the specified METADATA file contents.

    This returns a Distribution instance from the chosen backend sourced from
    the data in ``metadata_contents``.

    :param metadata_contents: Contents of a METADATA file within a dist, or
        one served via PEP 658.
    :param filename: Filename for the dist this metadata represents.
    :param canonical_name: Normalized project name of the given dist.
    """
    backend = select_backend()
    return backend.Distribution.from_metadata_file_contents(
        metadata_contents,
        filename,
        canonical_name,
    )
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (4.78 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/__pycache__/base.cpython-38.pyc ADDED
Binary file (27.7 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/_json.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Extracted from https://github.com/pfmoore/pkg_metadata
2
+
3
+ from email.header import Header, decode_header, make_header
4
+ from email.message import Message
5
+ from typing import Any, Dict, List, Union
6
+
7
# Core-metadata fields recognised by ``msg_to_json``, as (name, multiple-use)
# pairs. Multiple-use fields are collected into a list of values; single-use
# fields map to a plain string. Fields not listed here are ignored.
METADATA_FIELDS = [
    # Name, Multiple-Use
    ("Metadata-Version", False),
    ("Name", False),
    ("Version", False),
    ("Dynamic", True),
    ("Platform", True),
    ("Supported-Platform", True),
    ("Summary", False),
    ("Description", False),
    ("Description-Content-Type", False),
    ("Keywords", False),
    ("Home-page", False),
    ("Download-URL", False),
    ("Author", False),
    ("Author-email", False),
    ("Maintainer", False),
    ("Maintainer-email", False),
    ("License", False),
    ("Classifier", True),
    ("Requires-Dist", True),
    ("Requires-Python", False),
    ("Requires-External", True),
    ("Project-URL", True),
    ("Provides-Extra", True),
    ("Provides-Dist", True),
    ("Obsoletes-Dist", True),
]
35
+
36
+
37
def json_name(field: str) -> str:
    """Convert a METADATA field name to its JSON key (lowercase, underscores)."""
    return field.replace("-", "_").lower()
39
+
40
+
41
def msg_to_json(msg: Message) -> Dict[str, Any]:
    """Convert a Message object into a JSON-compatible dictionary.

    Known metadata fields (``METADATA_FIELDS``) are copied over, multiple-use
    fields as lists. Non-ASCII header encodings are sanitised into proper
    strings, ``Keywords`` is split into a list, and a non-empty message body
    becomes the ``description`` value.
    """

    def sanitise_header(h: Union[Header, str]) -> str:
        # Headers with raw non-ASCII bytes decode with an "unknown-8bit"
        # charset; try UTF-8 first, and fall back to latin1, which cannot
        # fail. ("data" renamed from "bytes", which shadowed the builtin.)
        if isinstance(h, Header):
            chunks = []
            for data, encoding in decode_header(h):
                if encoding == "unknown-8bit":
                    try:
                        # See if UTF-8 works
                        data.decode("utf-8")
                        encoding = "utf-8"
                    except UnicodeDecodeError:
                        # If not, latin1 at least won't fail
                        encoding = "latin1"
                chunks.append((data, encoding))
            return str(make_header(chunks))
        return str(h)

    result = {}
    for field, multi in METADATA_FIELDS:
        if field not in msg:
            continue
        key = json_name(field)
        if multi:
            value: Union[str, List[str]] = [
                sanitise_header(v) for v in msg.get_all(field)  # type: ignore
            ]
        else:
            value = sanitise_header(msg.get(field))  # type: ignore
            if key == "keywords":
                # Accept both comma-separated and space-separated
                # forms, for better compatibility with old data.
                if "," in value:
                    value = [v.strip() for v in value.split(",")]
                else:
                    value = value.split()
        result[key] = value

    payload = msg.get_payload()
    if payload:
        result["description"] = payload

    return result
@@ -0,0 +1,702 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import csv
2
+ import email.message
3
+ import functools
4
+ import json
5
+ import logging
6
+ import pathlib
7
+ import re
8
+ import zipfile
9
+ from typing import (
10
+ IO,
11
+ TYPE_CHECKING,
12
+ Any,
13
+ Collection,
14
+ Container,
15
+ Dict,
16
+ Iterable,
17
+ Iterator,
18
+ List,
19
+ NamedTuple,
20
+ Optional,
21
+ Tuple,
22
+ Union,
23
+ )
24
+
25
+ from pip._vendor.packaging.requirements import Requirement
26
+ from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
27
+ from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
28
+ from pip._vendor.packaging.version import LegacyVersion, Version
29
+
30
+ from pip._internal.exceptions import NoneMetadataError
31
+ from pip._internal.locations import site_packages, user_site
32
+ from pip._internal.models.direct_url import (
33
+ DIRECT_URL_METADATA_NAME,
34
+ DirectUrl,
35
+ DirectUrlValidationError,
36
+ )
37
+ from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here.
38
+ from pip._internal.utils.egg_link import egg_link_path_from_sys_path
39
+ from pip._internal.utils.misc import is_local, normalize_path
40
+ from pip._internal.utils.urls import url_to_path
41
+
42
+ from ._json import msg_to_json
43
+
44
+ if TYPE_CHECKING:
45
+ from typing import Protocol
46
+ else:
47
+ Protocol = object
48
+
49
+ DistributionVersion = Union[LegacyVersion, Version]
50
+
51
+ InfoPath = Union[str, pathlib.PurePath]
52
+
53
+ logger = logging.getLogger(__name__)
54
+
55
+
56
class BaseEntryPoint(Protocol):
    """Structural type for one entry point: a (name, value, group) triple."""

    @property
    def name(self) -> str:
        # Entry point name (the part before "=" in entry-point syntax).
        raise NotImplementedError()

    @property
    def value(self) -> str:
        # Entry point value, e.g. a "module:attr" reference string.
        raise NotImplementedError()

    @property
    def group(self) -> str:
        # Group the entry point belongs to, e.g. "console_scripts".
        raise NotImplementedError()
69
+
70
+ def _convert_installed_files_path(
71
+ entry: Tuple[str, ...],
72
+ info: Tuple[str, ...],
73
+ ) -> str:
74
+ """Convert a legacy installed-files.txt path into modern RECORD path.
75
+
76
+ The legacy format stores paths relative to the info directory, while the
77
+ modern format stores paths relative to the package root, e.g. the
78
+ site-packages directory.
79
+
80
+ :param entry: Path parts of the installed-files.txt entry.
81
+ :param info: Path parts of the egg-info directory relative to package root.
82
+ :returns: The converted entry.
83
+
84
+ For best compatibility with symlinks, this does not use ``abspath()`` or
85
+ ``Path.resolve()``, but tries to work with path parts:
86
+
87
+ 1. While ``entry`` starts with ``..``, remove the equal amounts of parts
88
+ from ``info``; if ``info`` is empty, start appending ``..`` instead.
89
+ 2. Join the two directly.
90
+ """
91
+ while entry and entry[0] == "..":
92
+ if not info or info[-1] == "..":
93
+ info += ("..",)
94
+ else:
95
+ info = info[:-1]
96
+ entry = entry[1:]
97
+ return str(pathlib.Path(*info, *entry))
98
+
99
+
100
class RequiresEntry(NamedTuple):
    """One parsed dependency entry: (requirement, extra, marker) strings.

    NOTE(review): presumably sourced from legacy egg-info ``requires.txt``
    sections — confirm against the call sites outside this view.
    """

    requirement: str  # The raw requirement specifier string.
    extra: str  # Extra name this entry is conditional on (may be empty).
    marker: str  # Environment marker string guarding the entry (may be empty).
104
+
105
+
106
+ class BaseDistribution(Protocol):
107
    @classmethod
    def from_directory(cls, directory: str) -> "BaseDistribution":
        """Load the distribution from a metadata directory.

        :param directory: Path to a metadata directory, e.g. ``.dist-info``.
        :raises NotImplementedError: Always; concrete backends must override.
        """
        raise NotImplementedError()
114
+
115
    @classmethod
    def from_metadata_file_contents(
        cls,
        metadata_contents: bytes,
        filename: str,
        project_name: str,
    ) -> "BaseDistribution":
        """Load the distribution from the contents of a METADATA file.

        This is used to implement PEP 658 by generating a "shallow" dist object that can
        be used for resolution without downloading or building the actual dist yet.

        :param metadata_contents: The contents of a METADATA file.
        :param filename: File name for the dist with this metadata.
        :param project_name: Name of the project this dist represents.
        :raises NotImplementedError: Always; concrete backends must override.
        """
        raise NotImplementedError()
132
+
133
    @classmethod
    def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution":
        """Load the distribution from a given wheel.

        :param wheel: A concrete wheel definition.
        :param name: File name of the wheel.

        :raises InvalidWheel: Whenever loading of the wheel causes a
            :py:exc:`zipfile.BadZipFile` exception to be thrown.
        :raises UnsupportedWheel: If the wheel is a valid zip, but malformed
            internally.
        :raises NotImplementedError: Always here; concrete backends override.
        """
        raise NotImplementedError()
146
+
147
+ def __repr__(self) -> str:
148
+ return f"{self.raw_name} {self.version} ({self.location})"
149
+
150
+ def __str__(self) -> str:
151
+ return f"{self.raw_name} {self.version}"
152
+
153
+ @property
154
+ def location(self) -> Optional[str]:
155
+ """Where the distribution is loaded from.
156
+
157
+ A string value is not necessarily a filesystem path, since distributions
158
+ can be loaded from other sources, e.g. arbitrary zip archives. ``None``
159
+ means the distribution is created in-memory.
160
+
161
+ Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
162
+ this is a symbolic link, we want to preserve the relative path between
163
+ it and files in the distribution.
164
+ """
165
+ raise NotImplementedError()
166
+
167
+ @property
168
+ def editable_project_location(self) -> Optional[str]:
169
+ """The project location for editable distributions.
170
+
171
+ This is the directory where pyproject.toml or setup.py is located.
172
+ None if the distribution is not installed in editable mode.
173
+ """
174
+ # TODO: this property is relatively costly to compute, memoize it ?
175
+ direct_url = self.direct_url
176
+ if direct_url:
177
+ if direct_url.is_local_editable():
178
+ return url_to_path(direct_url.url)
179
+ else:
180
+ # Search for an .egg-link file by walking sys.path, as it was
181
+ # done before by dist_is_editable().
182
+ egg_link_path = egg_link_path_from_sys_path(self.raw_name)
183
+ if egg_link_path:
184
+ # TODO: get project location from second line of egg_link file
185
+ # (https://github.com/pypa/pip/issues/10243)
186
+ return self.location
187
+ return None
188
+
189
+ @property
190
+ def installed_location(self) -> Optional[str]:
191
+ """The distribution's "installed" location.
192
+
193
+ This should generally be a ``site-packages`` directory. This is
194
+ usually ``dist.location``, except for legacy develop-installed packages,
195
+ where ``dist.location`` is the source code location, and this is where
196
+ the ``.egg-link`` file is.
197
+
198
+ The returned location is normalized (in particular, with symlinks removed).
199
+ """
200
+ raise NotImplementedError()
201
+
202
+ @property
203
+ def info_location(self) -> Optional[str]:
204
+ """Location of the .[egg|dist]-info directory or file.
205
+
206
+ Similarly to ``location``, a string value is not necessarily a
207
+ filesystem path. ``None`` means the distribution is created in-memory.
208
+
209
+ For a modern .dist-info installation on disk, this should be something
210
+ like ``{location}/{raw_name}-{version}.dist-info``.
211
+
212
+ Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
213
+ this is a symbolic link, we want to preserve the relative path between
214
+ it and other files in the distribution.
215
+ """
216
+ raise NotImplementedError()
217
+
218
+ @property
219
+ def installed_by_distutils(self) -> bool:
220
+ """Whether this distribution is installed with legacy distutils format.
221
+
222
+ A distribution installed with "raw" distutils not patched by setuptools
223
+ uses one single file at ``info_location`` to store metadata. We need to
224
+ treat this specially on uninstallation.
225
+ """
226
+ info_location = self.info_location
227
+ if not info_location:
228
+ return False
229
+ return pathlib.Path(info_location).is_file()
230
+
231
+ @property
232
+ def installed_as_egg(self) -> bool:
233
+ """Whether this distribution is installed as an egg.
234
+
235
+ This usually indicates the distribution was installed by (older versions
236
+ of) easy_install.
237
+ """
238
+ location = self.location
239
+ if not location:
240
+ return False
241
+ return location.endswith(".egg")
242
+
243
+ @property
244
+ def installed_with_setuptools_egg_info(self) -> bool:
245
+ """Whether this distribution is installed with the ``.egg-info`` format.
246
+
247
+ This usually indicates the distribution was installed with setuptools
248
+ with an old pip version or with ``single-version-externally-managed``.
249
+
250
+ Note that this ensure the metadata store is a directory. distutils can
251
+ also installs an ``.egg-info``, but as a file, not a directory. This
252
+ property is *False* for that case. Also see ``installed_by_distutils``.
253
+ """
254
+ info_location = self.info_location
255
+ if not info_location:
256
+ return False
257
+ if not info_location.endswith(".egg-info"):
258
+ return False
259
+ return pathlib.Path(info_location).is_dir()
260
+
261
+ @property
262
+ def installed_with_dist_info(self) -> bool:
263
+ """Whether this distribution is installed with the "modern format".
264
+
265
+ This indicates a "modern" installation, e.g. storing metadata in the
266
+ ``.dist-info`` directory. This applies to installations made by
267
+ setuptools (but through pip, not directly), or anything using the
268
+ standardized build backend interface (PEP 517).
269
+ """
270
+ info_location = self.info_location
271
+ if not info_location:
272
+ return False
273
+ if not info_location.endswith(".dist-info"):
274
+ return False
275
+ return pathlib.Path(info_location).is_dir()
276
+
277
+ @property
278
+ def canonical_name(self) -> NormalizedName:
279
+ raise NotImplementedError()
280
+
281
+ @property
282
+ def version(self) -> DistributionVersion:
283
+ raise NotImplementedError()
284
+
285
+ @property
286
+ def setuptools_filename(self) -> str:
287
+ """Convert a project name to its setuptools-compatible filename.
288
+
289
+ This is a copy of ``pkg_resources.to_filename()`` for compatibility.
290
+ """
291
+ return self.raw_name.replace("-", "_")
292
+
293
+ @property
294
+ def direct_url(self) -> Optional[DirectUrl]:
295
+ """Obtain a DirectUrl from this distribution.
296
+
297
+ Returns None if the distribution has no `direct_url.json` metadata,
298
+ or if `direct_url.json` is invalid.
299
+ """
300
+ try:
301
+ content = self.read_text(DIRECT_URL_METADATA_NAME)
302
+ except FileNotFoundError:
303
+ return None
304
+ try:
305
+ return DirectUrl.from_json(content)
306
+ except (
307
+ UnicodeDecodeError,
308
+ json.JSONDecodeError,
309
+ DirectUrlValidationError,
310
+ ) as e:
311
+ logger.warning(
312
+ "Error parsing %s for %s: %s",
313
+ DIRECT_URL_METADATA_NAME,
314
+ self.canonical_name,
315
+ e,
316
+ )
317
+ return None
318
+
319
+ @property
320
+ def installer(self) -> str:
321
+ try:
322
+ installer_text = self.read_text("INSTALLER")
323
+ except (OSError, ValueError, NoneMetadataError):
324
+ return "" # Fail silently if the installer file cannot be read.
325
+ for line in installer_text.splitlines():
326
+ cleaned_line = line.strip()
327
+ if cleaned_line:
328
+ return cleaned_line
329
+ return ""
330
+
331
+ @property
332
+ def requested(self) -> bool:
333
+ return self.is_file("REQUESTED")
334
+
335
+ @property
336
+ def editable(self) -> bool:
337
+ return bool(self.editable_project_location)
338
+
339
+ @property
340
+ def local(self) -> bool:
341
+ """If distribution is installed in the current virtual environment.
342
+
343
+ Always True if we're not in a virtualenv.
344
+ """
345
+ if self.installed_location is None:
346
+ return False
347
+ return is_local(self.installed_location)
348
+
349
+ @property
350
+ def in_usersite(self) -> bool:
351
+ if self.installed_location is None or user_site is None:
352
+ return False
353
+ return self.installed_location.startswith(normalize_path(user_site))
354
+
355
+ @property
356
+ def in_site_packages(self) -> bool:
357
+ if self.installed_location is None or site_packages is None:
358
+ return False
359
+ return self.installed_location.startswith(normalize_path(site_packages))
360
+
361
+ def is_file(self, path: InfoPath) -> bool:
362
+ """Check whether an entry in the info directory is a file."""
363
+ raise NotImplementedError()
364
+
365
+ def iter_distutils_script_names(self) -> Iterator[str]:
366
+ """Find distutils 'scripts' entries metadata.
367
+
368
+ If 'scripts' is supplied in ``setup.py``, distutils records those in the
369
+ installed distribution's ``scripts`` directory, a file for each script.
370
+ """
371
+ raise NotImplementedError()
372
+
373
+ def read_text(self, path: InfoPath) -> str:
374
+ """Read a file in the info directory.
375
+
376
+ :raise FileNotFoundError: If ``path`` does not exist in the directory.
377
+ :raise NoneMetadataError: If ``path`` exists in the info directory, but
378
+ cannot be read.
379
+ """
380
+ raise NotImplementedError()
381
+
382
+ def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
383
+ raise NotImplementedError()
384
+
385
+ def _metadata_impl(self) -> email.message.Message:
386
+ raise NotImplementedError()
387
+
388
+ @functools.lru_cache(maxsize=1)
389
+ def _metadata_cached(self) -> email.message.Message:
390
+ # When we drop python 3.7 support, move this to the metadata property and use
391
+ # functools.cached_property instead of lru_cache.
392
+ metadata = self._metadata_impl()
393
+ self._add_egg_info_requires(metadata)
394
+ return metadata
395
+
396
+ @property
397
+ def metadata(self) -> email.message.Message:
398
+ """Metadata of distribution parsed from e.g. METADATA or PKG-INFO.
399
+
400
+ This should return an empty message if the metadata file is unavailable.
401
+
402
+ :raises NoneMetadataError: If the metadata file is available, but does
403
+ not contain valid metadata.
404
+ """
405
+ return self._metadata_cached()
406
+
407
+ @property
408
+ def metadata_dict(self) -> Dict[str, Any]:
409
+ """PEP 566 compliant JSON-serializable representation of METADATA or PKG-INFO.
410
+
411
+ This should return an empty dict if the metadata file is unavailable.
412
+
413
+ :raises NoneMetadataError: If the metadata file is available, but does
414
+ not contain valid metadata.
415
+ """
416
+ return msg_to_json(self.metadata)
417
+
418
+ @property
419
+ def metadata_version(self) -> Optional[str]:
420
+ """Value of "Metadata-Version:" in distribution metadata, if available."""
421
+ return self.metadata.get("Metadata-Version")
422
+
423
+ @property
424
+ def raw_name(self) -> str:
425
+ """Value of "Name:" in distribution metadata."""
426
+ # The metadata should NEVER be missing the Name: key, but if it somehow
427
+ # does, fall back to the known canonical name.
428
+ return self.metadata.get("Name", self.canonical_name)
429
+
430
+ @property
431
+ def requires_python(self) -> SpecifierSet:
432
+ """Value of "Requires-Python:" in distribution metadata.
433
+
434
+ If the key does not exist or contains an invalid value, an empty
435
+ SpecifierSet should be returned.
436
+ """
437
+ value = self.metadata.get("Requires-Python")
438
+ if value is None:
439
+ return SpecifierSet()
440
+ try:
441
+ # Convert to str to satisfy the type checker; this can be a Header object.
442
+ spec = SpecifierSet(str(value))
443
+ except InvalidSpecifier as e:
444
+ message = "Package %r has an invalid Requires-Python: %s"
445
+ logger.warning(message, self.raw_name, e)
446
+ return SpecifierSet()
447
+ return spec
448
+
449
+ def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
450
+ """Dependencies of this distribution.
451
+
452
+ For modern .dist-info distributions, this is the collection of
453
+ "Requires-Dist:" entries in distribution metadata.
454
+ """
455
+ raise NotImplementedError()
456
+
457
+ def iter_provided_extras(self) -> Iterable[str]:
458
+ """Extras provided by this distribution.
459
+
460
+ For modern .dist-info distributions, this is the collection of
461
+ "Provides-Extra:" entries in distribution metadata.
462
+
463
+ The return value of this function is not particularly useful other than
464
+ display purposes due to backward compatibility issues and the extra
465
+ names being poorly normalized prior to PEP 685. If you want to perform
466
+ logic operations on extras, use :func:`is_extra_provided` instead.
467
+ """
468
+ raise NotImplementedError()
469
+
470
+ def is_extra_provided(self, extra: str) -> bool:
471
+ """Check whether an extra is provided by this distribution.
472
+
473
+ This is needed mostly for compatibility issues with pkg_resources not
474
+ following the extra normalization rules defined in PEP 685.
475
+ """
476
+ raise NotImplementedError()
477
+
478
+ def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]:
479
+ try:
480
+ text = self.read_text("RECORD")
481
+ except FileNotFoundError:
482
+ return None
483
+ # This extra Path-str cast normalizes entries.
484
+ return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines()))
485
+
486
+ def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]:
487
+ try:
488
+ text = self.read_text("installed-files.txt")
489
+ except FileNotFoundError:
490
+ return None
491
+ paths = (p for p in text.splitlines(keepends=False) if p)
492
+ root = self.location
493
+ info = self.info_location
494
+ if root is None or info is None:
495
+ return paths
496
+ try:
497
+ info_rel = pathlib.Path(info).relative_to(root)
498
+ except ValueError: # info is not relative to root.
499
+ return paths
500
+ if not info_rel.parts: # info *is* root.
501
+ return paths
502
+ return (
503
+ _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts)
504
+ for p in paths
505
+ )
506
+
507
+ def iter_declared_entries(self) -> Optional[Iterator[str]]:
508
+ """Iterate through file entries declared in this distribution.
509
+
510
+ For modern .dist-info distributions, this is the files listed in the
511
+ ``RECORD`` metadata file. For legacy setuptools distributions, this
512
+ comes from ``installed-files.txt``, with entries normalized to be
513
+ compatible with the format used by ``RECORD``.
514
+
515
+ :return: An iterator for listed entries, or None if the distribution
516
+ contains neither ``RECORD`` nor ``installed-files.txt``.
517
+ """
518
+ return (
519
+ self._iter_declared_entries_from_record()
520
+ or self._iter_declared_entries_from_legacy()
521
+ )
522
+
523
+ def _iter_requires_txt_entries(self) -> Iterator[RequiresEntry]:
524
+ """Parse a ``requires.txt`` in an egg-info directory.
525
+
526
+ This is an INI-ish format where an egg-info stores dependencies. A
527
+ section name describes extra other environment markers, while each entry
528
+ is an arbitrary string (not a key-value pair) representing a dependency
529
+ as a requirement string (no markers).
530
+
531
+ There is a construct in ``importlib.metadata`` called ``Sectioned`` that
532
+ does mostly the same, but the format is currently considered private.
533
+ """
534
+ try:
535
+ content = self.read_text("requires.txt")
536
+ except FileNotFoundError:
537
+ return
538
+ extra = marker = "" # Section-less entries don't have markers.
539
+ for line in content.splitlines():
540
+ line = line.strip()
541
+ if not line or line.startswith("#"): # Comment; ignored.
542
+ continue
543
+ if line.startswith("[") and line.endswith("]"): # A section header.
544
+ extra, _, marker = line.strip("[]").partition(":")
545
+ continue
546
+ yield RequiresEntry(requirement=line, extra=extra, marker=marker)
547
+
548
+ def _iter_egg_info_extras(self) -> Iterable[str]:
549
+ """Get extras from the egg-info directory."""
550
+ known_extras = {""}
551
+ for entry in self._iter_requires_txt_entries():
552
+ extra = canonicalize_name(entry.extra)
553
+ if extra in known_extras:
554
+ continue
555
+ known_extras.add(extra)
556
+ yield extra
557
+
558
+ def _iter_egg_info_dependencies(self) -> Iterable[str]:
559
+ """Get distribution dependencies from the egg-info directory.
560
+
561
+ To ease parsing, this converts a legacy dependency entry into a PEP 508
562
+ requirement string. Like ``_iter_requires_txt_entries()``, there is code
563
+ in ``importlib.metadata`` that does mostly the same, but not do exactly
564
+ what we need.
565
+
566
+ Namely, ``importlib.metadata`` does not normalize the extra name before
567
+ putting it into the requirement string, which causes marker comparison
568
+ to fail because the dist-info format do normalize. This is consistent in
569
+ all currently available PEP 517 backends, although not standardized.
570
+ """
571
+ for entry in self._iter_requires_txt_entries():
572
+ extra = canonicalize_name(entry.extra)
573
+ if extra and entry.marker:
574
+ marker = f'({entry.marker}) and extra == "{extra}"'
575
+ elif extra:
576
+ marker = f'extra == "{extra}"'
577
+ elif entry.marker:
578
+ marker = entry.marker
579
+ else:
580
+ marker = ""
581
+ if marker:
582
+ yield f"{entry.requirement} ; {marker}"
583
+ else:
584
+ yield entry.requirement
585
+
586
+ def _add_egg_info_requires(self, metadata: email.message.Message) -> None:
587
+ """Add egg-info requires.txt information to the metadata."""
588
+ if not metadata.get_all("Requires-Dist"):
589
+ for dep in self._iter_egg_info_dependencies():
590
+ metadata["Requires-Dist"] = dep
591
+ if not metadata.get_all("Provides-Extra"):
592
+ for extra in self._iter_egg_info_extras():
593
+ metadata["Provides-Extra"] = extra
594
+
595
+
596
class BaseEnvironment:
    """An environment containing distributions to introspect."""

    @classmethod
    def default(cls) -> "BaseEnvironment":
        """Create an environment for the running Python interpreter."""
        raise NotImplementedError()

    @classmethod
    def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment":
        """Create an environment from a list of search paths."""
        raise NotImplementedError()

    def get_distribution(self, name: str) -> Optional["BaseDistribution"]:
        """Given a requirement name, return the installed distribution.

        The name may not be normalized. The implementation must canonicalize
        it for lookup.
        """
        raise NotImplementedError()

    def _iter_distributions(self) -> Iterator["BaseDistribution"]:
        """Iterate through installed distributions.

        This function should be implemented by subclass, but never called
        directly. Use the public ``iter_distribution()`` instead, which
        implements additional logic to make sure the distributions are valid.
        """
        raise NotImplementedError()

    def iter_all_distributions(self) -> Iterator[BaseDistribution]:
        """Iterate through all installed distributions without any filtering."""
        for dist in self._iter_distributions():
            # Make sure the distribution actually comes from a valid Python
            # packaging distribution. Pip's AdjacentTempDirectory leaves folders
            # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The
            # valid project name pattern is taken from PEP 508.
            project_name_valid = re.match(
                r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$",
                dist.canonical_name,
                flags=re.IGNORECASE,
            )
            if not project_name_valid:
                logger.warning(
                    "Ignoring invalid distribution %s (%s)",
                    dist.canonical_name,
                    dist.location,
                )
                continue
            yield dist

    def iter_installed_distributions(
        self,
        local_only: bool = True,
        skip: Container[str] = stdlib_pkgs,
        include_editables: bool = True,
        editables_only: bool = False,
        user_only: bool = False,
    ) -> Iterator[BaseDistribution]:
        """Return a list of installed distributions.

        This is based on ``iter_all_distributions()`` with additional filtering
        options. Note that ``iter_installed_distributions()`` without arguments
        is *not* equal to ``iter_all_distributions()``, since some of the
        configurations exclude packages by default.

        :param local_only: If True (default), only return installations
            local to the current virtualenv, if in a virtualenv.
        :param skip: An iterable of canonicalized project names to ignore;
            defaults to ``stdlib_pkgs``.
        :param include_editables: If False, don't report editables.
        :param editables_only: If True, only report editables.
        :param user_only: If True, only report installations in the user
            site directory.
        """
        # Each filter wraps the iterator lazily; nothing is evaluated until
        # the caller consumes the result.
        it = self.iter_all_distributions()
        if local_only:
            it = (d for d in it if d.local)
        if not include_editables:
            it = (d for d in it if not d.editable)
        if editables_only:
            it = (d for d in it if d.editable)
        if user_only:
            it = (d for d in it if d.in_usersite)
        return (d for d in it if d.canonical_name not in skip)
679
+
680
+
681
class Wheel(Protocol):
    """Protocol for an object that grants zip access to a wheel's contents."""

    # Human-readable location of the wheel (e.g. a filesystem path or URL).
    location: str

    def as_zipfile(self) -> zipfile.ZipFile:
        """Open the wheel's contents as a ``zipfile.ZipFile``."""
        raise NotImplementedError()
686
+
687
+
688
class FilesystemWheel(Wheel):
    """A wheel that exists as a regular file on the local filesystem."""

    def __init__(self, location: str) -> None:
        # Path to the .whl file on disk.
        self.location = location

    def as_zipfile(self) -> zipfile.ZipFile:
        """Open the on-disk wheel file as a zip archive."""
        # allowZip64 lets us read wheels larger than the classic 2 GiB limit.
        return zipfile.ZipFile(self.location, allowZip64=True)
694
+
695
+
696
class MemoryWheel(Wheel):
    """A wheel whose contents live in an in-memory byte stream."""

    def __init__(self, location: str, stream: IO[bytes]) -> None:
        # ``location`` is kept only for display; the data comes from ``stream``.
        self.location = location
        self.stream = stream

    def as_zipfile(self) -> zipfile.ZipFile:
        """Open the in-memory stream as a zip archive."""
        # allowZip64 lets us read wheels larger than the classic 2 GiB limit.
        return zipfile.ZipFile(self.stream, allowZip64=True)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/__init__.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ from ._dists import Distribution
2
+ from ._envs import Environment
3
+
4
+ __all__ = ["NAME", "Distribution", "Environment"]
5
+
6
+ NAME = "importlib"
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (306 Bytes). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-38.pyc ADDED
Binary file (2.72 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-38.pyc ADDED
Binary file (8.98 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-38.pyc ADDED
Binary file (7.59 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/_compat.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import importlib.metadata
2
+ from typing import Any, Optional, Protocol, cast
3
+
4
+
5
class BadMetadata(ValueError):
    """Raised when a distribution's metadata cannot be used.

    Carries the offending distribution object and a human-readable reason.
    """

    def __init__(self, dist: importlib.metadata.Distribution, *, reason: str) -> None:
        self.dist = dist
        self.reason = reason

    def __str__(self) -> str:
        return "Bad metadata in {} ({})".format(self.dist, self.reason)
12
+
13
+
14
class BasePath(Protocol):
    """A protocol that various path objects conform.

    This exists because importlib.metadata uses both ``pathlib.Path`` and
    ``zipfile.Path``, and we need a common base for type hints (Union does not
    work well since ``zipfile.Path`` is too new for our linter setup).

    This does not mean to be exhaustive, but only contains things that present
    in both classes *that we need*.
    """

    @property
    def name(self) -> str:
        """Final component of the path."""
        raise NotImplementedError()

    @property
    def parent(self) -> "BasePath":
        """Logical parent of the path."""
        raise NotImplementedError()
+
34
def get_info_location(d: importlib.metadata.Distribution) -> Optional[BasePath]:
    """Find the path to the distribution's metadata directory.

    HACK: This relies on importlib.metadata's private ``_path`` attribute. Not
    all distributions exist on disk, so importlib.metadata is correct to not
    expose the attribute as public. But pip's code base is old and not as clean,
    so we do this to avoid having to rewrite too many things. Hopefully we can
    eliminate this some day.

    :returns: The metadata directory path, or ``None`` when the distribution
        object carries no ``_path`` attribute.
    """
    try:
        return d._path  # type: ignore[attr-defined]
    except AttributeError:
        return None
44
+
45
+
46
def get_dist_name(dist: importlib.metadata.Distribution) -> str:
    """Get the distribution's project name.

    The ``name`` attribute is only available in Python 3.10 or later. We are
    targeting exactly that, but Mypy does not know this.

    :raises BadMetadata: If the ``name`` metadata entry is not a string.
    """
    raw_name = cast(Any, dist).name
    if isinstance(raw_name, str):
        return raw_name
    raise BadMetadata(dist, reason="invalid metadata entry 'name'")
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/_dists.py ADDED
@@ -0,0 +1,227 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import email.message
2
+ import importlib.metadata
3
+ import os
4
+ import pathlib
5
+ import zipfile
6
+ from typing import (
7
+ Collection,
8
+ Dict,
9
+ Iterable,
10
+ Iterator,
11
+ Mapping,
12
+ Optional,
13
+ Sequence,
14
+ cast,
15
+ )
16
+
17
+ from pip._vendor.packaging.requirements import Requirement
18
+ from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
19
+ from pip._vendor.packaging.version import parse as parse_version
20
+
21
+ from pip._internal.exceptions import InvalidWheel, UnsupportedWheel
22
+ from pip._internal.metadata.base import (
23
+ BaseDistribution,
24
+ BaseEntryPoint,
25
+ DistributionVersion,
26
+ InfoPath,
27
+ Wheel,
28
+ )
29
+ from pip._internal.utils.misc import normalize_path
30
+ from pip._internal.utils.temp_dir import TempDirectory
31
+ from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
32
+
33
+ from ._compat import BasePath, get_dist_name
34
+
35
+
36
class WheelDistribution(importlib.metadata.Distribution):
    """An ``importlib.metadata.Distribution`` read from a wheel.

    Although ``importlib.metadata.PathDistribution`` accepts ``zipfile.Path``,
    its implementation is too "lazy" for pip's needs (we can't keep the ZipFile
    handle open for the entire lifetime of the distribution object).

    This implementation eagerly reads the entire metadata directory into the
    memory instead, and operates from that.
    """

    def __init__(
        self,
        files: Mapping[pathlib.PurePosixPath, bytes],
        info_location: pathlib.PurePosixPath,
    ) -> None:
        # ``files`` maps paths relative to the info directory to raw bytes.
        self._files = files
        self.info_location = info_location

    @classmethod
    def from_zipfile(
        cls,
        zf: zipfile.ZipFile,
        name: str,
        location: str,
    ) -> "WheelDistribution":
        """Eagerly read a wheel's metadata directory from an open zip.

        :param zf: The opened wheel archive.
        :param name: File name of the wheel (used in error messages).
        :param location: Display location of the wheel on disk/in memory.
        """
        info_dir, _ = parse_wheel(zf, name)
        # NOTE: the inner ``name`` deliberately shadows the parameter here;
        # it iterates over archive member names within the info directory.
        paths = (
            (name, pathlib.PurePosixPath(name.split("/", 1)[-1]))
            for name in zf.namelist()
            if name.startswith(f"{info_dir}/")
        )
        files = {
            relpath: read_wheel_metadata_file(zf, fullpath)
            for fullpath, relpath in paths
        }
        info_location = pathlib.PurePosixPath(location, info_dir)
        return cls(files, info_location)

    def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]:
        # Only allow iterating through the metadata directory.
        if pathlib.PurePosixPath(str(path)) in self._files:
            return iter(self._files)
        raise FileNotFoundError(path)

    def read_text(self, filename: str) -> Optional[str]:
        """Return a metadata file's text, or ``None`` if it does not exist.

        :raises UnsupportedWheel: If the file exists but is not valid UTF-8.
        """
        try:
            data = self._files[pathlib.PurePosixPath(filename)]
        except KeyError:
            return None
        try:
            text = data.decode("utf-8")
        except UnicodeDecodeError as e:
            wheel = self.info_location.parent
            error = f"Error decoding metadata for {wheel}: {e} in (unknown) file"
            raise UnsupportedWheel(error)
        return text
93
+
94
+
95
class Distribution(BaseDistribution):
    """``BaseDistribution`` implementation backed by ``importlib.metadata``."""

    def __init__(
        self,
        dist: importlib.metadata.Distribution,
        info_location: Optional[BasePath],
        installed_location: Optional[BasePath],
    ) -> None:
        self._dist = dist
        self._info_location = info_location
        self._installed_location = installed_location

    @classmethod
    def from_directory(cls, directory: str) -> BaseDistribution:
        info_location = pathlib.Path(directory)
        dist = importlib.metadata.Distribution.at(info_location)
        return cls(dist, info_location, info_location.parent)

    @classmethod
    def from_metadata_file_contents(
        cls,
        metadata_contents: bytes,
        filename: str,
        project_name: str,
    ) -> BaseDistribution:
        # Generate temp dir to contain the metadata file, and write the file contents.
        temp_dir = pathlib.Path(
            TempDirectory(kind="metadata", globally_managed=True).path
        )
        metadata_path = temp_dir / "METADATA"
        metadata_path.write_bytes(metadata_contents)
        # Construct dist pointing to the newly created directory.
        dist = importlib.metadata.Distribution.at(metadata_path.parent)
        return cls(dist, metadata_path.parent, None)

    @classmethod
    def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
        try:
            with wheel.as_zipfile() as zf:
                dist = WheelDistribution.from_zipfile(zf, name, wheel.location)
        except zipfile.BadZipFile as e:
            raise InvalidWheel(wheel.location, name) from e
        except UnsupportedWheel as e:
            # Re-raise with the wheel's name prepended for context.
            raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
        return cls(dist, dist.info_location, pathlib.PurePosixPath(wheel.location))

    @property
    def location(self) -> Optional[str]:
        if self._info_location is None:
            return None
        return str(self._info_location.parent)

    @property
    def info_location(self) -> Optional[str]:
        if self._info_location is None:
            return None
        return str(self._info_location)

    @property
    def installed_location(self) -> Optional[str]:
        if self._installed_location is None:
            return None
        return normalize_path(str(self._installed_location))

    def _get_dist_name_from_location(self) -> Optional[str]:
        """Try to get the name from the metadata directory name.

        This is much faster than reading metadata.
        """
        if self._info_location is None:
            return None
        stem, suffix = os.path.splitext(self._info_location.name)
        if suffix not in (".dist-info", ".egg-info"):
            return None
        return stem.split("-", 1)[0]

    @property
    def canonical_name(self) -> NormalizedName:
        # Prefer the cheap directory-name lookup; fall back to metadata.
        name = self._get_dist_name_from_location() or get_dist_name(self._dist)
        return canonicalize_name(name)

    @property
    def version(self) -> DistributionVersion:
        return parse_version(self._dist.version)

    def is_file(self, path: InfoPath) -> bool:
        return self._dist.read_text(str(path)) is not None

    def iter_distutils_script_names(self) -> Iterator[str]:
        # A distutils installation is always "flat" (not in e.g. egg form), so
        # if this distribution's info location is NOT a pathlib.Path (but e.g.
        # zipfile.Path), it can never contain any distutils scripts.
        if not isinstance(self._info_location, pathlib.Path):
            return
        for child in self._info_location.joinpath("scripts").iterdir():
            yield child.name

    def read_text(self, path: InfoPath) -> str:
        content = self._dist.read_text(str(path))
        if content is None:
            raise FileNotFoundError(path)
        return content

    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
        # importlib.metadata's EntryPoint structure satisfies BaseEntryPoint.
        return self._dist.entry_points

    def _metadata_impl(self) -> email.message.Message:
        # From Python 3.10+, importlib.metadata declares PackageMetadata as the
        # return type. This protocol is unfortunately a disaster now and misses
        # a ton of fields that we need, including get() and get_payload(). We
        # rely on the implementation that the object is actually a Message now,
        # until upstream can improve the protocol. (python/cpython#94952)
        return cast(email.message.Message, self._dist.metadata)

    def iter_provided_extras(self) -> Iterable[str]:
        return self.metadata.get_all("Provides-Extra", [])

    def is_extra_provided(self, extra: str) -> bool:
        # Compare names after normalization; see BaseDistribution docs on
        # PEP 685 compatibility.
        return any(
            canonicalize_name(provided_extra) == canonicalize_name(extra)
            for provided_extra in self.metadata.get_all("Provides-Extra", [])
        )

    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
        # Each requested extra yields one marker-evaluation context.
        contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras]
        for req_string in self.metadata.get_all("Requires-Dist", []):
            req = Requirement(req_string)
            if not req.marker:
                yield req
            elif not extras and req.marker.evaluate({"extra": ""}):
                yield req
            elif any(req.marker.evaluate(context) for context in contexts):
                yield req
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/importlib/_envs.py ADDED
@@ -0,0 +1,189 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import importlib.metadata
3
+ import logging
4
+ import os
5
+ import pathlib
6
+ import sys
7
+ import zipfile
8
+ import zipimport
9
+ from typing import Iterator, List, Optional, Sequence, Set, Tuple
10
+
11
+ from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
12
+
13
+ from pip._internal.metadata.base import BaseDistribution, BaseEnvironment
14
+ from pip._internal.models.wheel import Wheel
15
+ from pip._internal.utils.deprecation import deprecated
16
+ from pip._internal.utils.filetypes import WHEEL_EXTENSION
17
+
18
+ from ._compat import BadMetadata, BasePath, get_dist_name, get_info_location
19
+ from ._dists import Distribution
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+
24
def _looks_like_wheel(location: str) -> bool:
    """Heuristically decide whether *location* points at a valid wheel file.

    Requires the .whl extension, an existing regular file, a filename that
    matches the wheel naming convention, and a readable ZIP archive.
    """
    return (
        location.endswith(WHEEL_EXTENSION)
        and os.path.isfile(location)
        and Wheel.wheel_file_re.match(os.path.basename(location)) is not None
        and zipfile.is_zipfile(location)
    )
32
+
33
+
34
class _DistributionFinder:
    """Finder to locate distributions.

    The main purpose of this class is to memoize found distributions' names, so
    only one distribution is returned for each package name. A lot of pip code
    assumes this (because it is setuptools's behavior), and not doing the same
    can potentially cause a distribution in a lower precedence path to override
    a higher precedence one if the caller is not careful.

    Eventually we probably want to make it possible to see lower precedence
    installations as well. It's a useful feature, after all.
    """

    FoundResult = Tuple[importlib.metadata.Distribution, Optional[BasePath]]

    def __init__(self) -> None:
        # Canonicalized names already yielded; used to skip duplicates found
        # later in lower-precedence locations.
        self._found_names: Set[NormalizedName] = set()

    def _find_impl(self, location: str) -> Iterator[FoundResult]:
        """Find distributions in a location."""
        # Skip looking inside a wheel. Since a package inside a wheel is not
        # always valid (due to .data directories etc.), its .dist-info entry
        # should not be considered an installed distribution.
        if _looks_like_wheel(location):
            return
        # To know exactly where we find a distribution, we have to feed in the
        # paths one by one, instead of dumping the list to importlib.metadata.
        for dist in importlib.metadata.distributions(path=[location]):
            info_location = get_info_location(dist)
            try:
                raw_name = get_dist_name(dist)
            except BadMetadata as e:
                # Unreadable/invalid metadata: warn and move on rather than
                # failing the whole scan.
                logger.warning("Skipping %s due to %s", info_location, e.reason)
                continue
            normalized_name = canonicalize_name(raw_name)
            if normalized_name in self._found_names:
                continue
            self._found_names.add(normalized_name)
            yield dist, info_location

    def find(self, location: str) -> Iterator[BaseDistribution]:
        """Find distributions in a location.

        The path can be either a directory, or a ZIP archive.
        """
        for dist, info_location in self._find_impl(location):
            if info_location is None:
                installed_location: Optional[BasePath] = None
            else:
                installed_location = info_location.parent
            yield Distribution(dist, info_location, installed_location)

    def find_linked(self, location: str) -> Iterator[BaseDistribution]:
        """Read location in egg-link files and return distributions in there.

        The path should be a directory; otherwise this returns nothing. This
        follows how setuptools does this for compatibility. The first non-empty
        line in the egg-link is read as a path (resolved against the egg-link's
        containing directory if relative). Distributions found at that linked
        location are returned.
        """
        path = pathlib.Path(location)
        if not path.is_dir():
            return
        for child in path.iterdir():
            if child.suffix != ".egg-link":
                continue
            with child.open() as f:
                lines = (line.strip() for line in f)
                target_rel = next((line for line in lines if line), "")
            if not target_rel:
                continue
            # joinpath() resolves relative targets against the egg-link's
            # directory; absolute targets replace the base entirely.
            target_location = str(path.joinpath(target_rel))
            for dist, info_location in self._find_impl(target_location):
                yield Distribution(dist, info_location, path)

    def _find_eggs_in_dir(self, location: str) -> Iterator[BaseDistribution]:
        # Imported lazily to avoid paying the pkg_resources import cost unless
        # eggs are actually present.
        from pip._vendor.pkg_resources import find_distributions

        from pip._internal.metadata import pkg_resources as legacy

        with os.scandir(location) as it:
            for entry in it:
                if not entry.name.endswith(".egg"):
                    continue
                for dist in find_distributions(entry.path):
                    yield legacy.Distribution(dist)

    def _find_eggs_in_zip(self, location: str) -> Iterator[BaseDistribution]:
        # Imported lazily for the same reason as _find_eggs_in_dir.
        from pip._vendor.pkg_resources import find_eggs_in_zip

        from pip._internal.metadata import pkg_resources as legacy

        try:
            importer = zipimport.zipimporter(location)
        except zipimport.ZipImportError:
            # Not a importable ZIP; nothing to yield.
            return
        for dist in find_eggs_in_zip(importer, location):
            yield legacy.Distribution(dist)

    def find_eggs(self, location: str) -> Iterator[BaseDistribution]:
        """Find eggs in a location.

        This actually uses the old *pkg_resources* backend. We likely want to
        deprecate this so we can eventually remove the *pkg_resources*
        dependency entirely. Before that, this should first emit a deprecation
        warning for some versions when using the fallback since importing
        *pkg_resources* is slow for those who don't need it.
        """
        if os.path.isdir(location):
            yield from self._find_eggs_in_dir(location)
        if zipfile.is_zipfile(location):
            yield from self._find_eggs_in_zip(location)
147
+
148
+
149
@functools.lru_cache(maxsize=None)  # Warn a distribution exactly once.
def _emit_egg_deprecation(location: Optional[str]) -> None:
    """Emit pip's standard deprecation warning for an egg at *location*."""
    deprecated(
        reason=f"Loading egg at {location} is deprecated.",
        replacement="to use pip for package installation.",
        gone_in="24.3",
        issue=12330,
    )
157
+
158
+
159
class Environment(BaseEnvironment):
    """Environment backed by importlib.metadata, scanning a list of paths."""

    def __init__(self, paths: Sequence[str]) -> None:
        self._paths = paths

    @classmethod
    def default(cls) -> BaseEnvironment:
        # Mirror the running interpreter's search path.
        return cls(sys.path)

    @classmethod
    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
        if paths is None:
            return cls(sys.path)
        return cls(paths)

    def _iter_distributions(self) -> Iterator[BaseDistribution]:
        """Yield distributions from every configured path entry."""
        finder = _DistributionFinder()
        for entry in self._paths:
            yield from finder.find(entry)
            for egg_dist in finder.find_eggs(entry):
                _emit_egg_deprecation(egg_dist.location)
                yield egg_dist
            # This must go last because that's how pkg_resources tie-breaks.
            yield from finder.find_linked(entry)

    def get_distribution(self, name: str) -> Optional[BaseDistribution]:
        """Return the installed distribution matching *name*, if any."""
        wanted = canonicalize_name(name)
        for candidate in self.iter_all_distributions():
            if candidate.canonical_name == wanted:
                return candidate
        return None
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/metadata/pkg_resources.py ADDED
@@ -0,0 +1,278 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import email.message
2
+ import email.parser
3
+ import logging
4
+ import os
5
+ import zipfile
6
+ from typing import Collection, Iterable, Iterator, List, Mapping, NamedTuple, Optional
7
+
8
+ from pip._vendor import pkg_resources
9
+ from pip._vendor.packaging.requirements import Requirement
10
+ from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
11
+ from pip._vendor.packaging.version import parse as parse_version
12
+
13
+ from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel
14
+ from pip._internal.utils.egg_link import egg_link_path_from_location
15
+ from pip._internal.utils.misc import display_path, normalize_path
16
+ from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
17
+
18
+ from .base import (
19
+ BaseDistribution,
20
+ BaseEntryPoint,
21
+ BaseEnvironment,
22
+ DistributionVersion,
23
+ InfoPath,
24
+ Wheel,
25
+ )
26
+
27
+ __all__ = ["NAME", "Distribution", "Environment"]
28
+
29
+ logger = logging.getLogger(__name__)
30
+
31
+ NAME = "pkg_resources"
32
+
33
+
34
class EntryPoint(NamedTuple):
    """Minimal entry-point triple satisfying BaseEntryPoint."""

    # e.g. for "console_scripts": name="pip", value="pip._internal.cli.main:main"
    name: str
    value: str
    group: str
38
+
39
+
40
class InMemoryMetadata:
    """IMetadataProvider that reads metadata files from a dictionary.

    This also maps metadata decoding exceptions to our internal exception type.
    """

    def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None:
        self._metadata = metadata
        self._wheel_name = wheel_name

    def has_metadata(self, name: str) -> bool:
        """Check whether a metadata file named *name* is present."""
        return name in self._metadata

    def get_metadata(self, name: str) -> str:
        """Decode and return the file *name*; wrap decode errors."""
        raw = self._metadata[name]
        try:
            return raw.decode()
        except UnicodeDecodeError as e:
            # Augment the default error with the origin of the file.
            raise UnsupportedWheel(
                f"Error decoding metadata for {self._wheel_name}: {e} in {name} file"
            )

    def get_metadata_lines(self, name: str) -> Iterable[str]:
        """Return the file's content split into non-comment lines."""
        return pkg_resources.yield_lines(self.get_metadata(name))

    def metadata_isdir(self, name: str) -> bool:
        # The in-memory mapping is flat; there are never subdirectories.
        return False

    def metadata_listdir(self, name: str) -> List[str]:
        # No directory structure to list.
        return []

    def run_script(self, script_name: str, namespace: str) -> None:
        # Script execution is intentionally a no-op for in-memory metadata.
        pass
73
+
74
+
75
class Distribution(BaseDistribution):
    """BaseDistribution implementation backed by a pkg_resources.Distribution."""

    def __init__(self, dist: pkg_resources.Distribution) -> None:
        self._dist = dist

    @classmethod
    def from_directory(cls, directory: str) -> BaseDistribution:
        """Build a Distribution from an on-disk .egg-info/.dist-info directory."""
        dist_dir = directory.rstrip(os.sep)

        # Build a PathMetadata object, from path to metadata. :wink:
        base_dir, dist_dir_name = os.path.split(dist_dir)
        metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

        # Determine the correct Distribution object type.
        if dist_dir.endswith(".egg-info"):
            dist_cls = pkg_resources.Distribution
            dist_name = os.path.splitext(dist_dir_name)[0]
        else:
            assert dist_dir.endswith(".dist-info")
            dist_cls = pkg_resources.DistInfoDistribution
            # .dist-info names look like "name-version"; keep only the name.
            dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]

        dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata)
        return cls(dist)

    @classmethod
    def from_metadata_file_contents(
        cls,
        metadata_contents: bytes,
        filename: str,
        project_name: str,
    ) -> BaseDistribution:
        """Build a Distribution from raw METADATA bytes (no install needed)."""
        metadata_dict = {
            "METADATA": metadata_contents,
        }
        dist = pkg_resources.DistInfoDistribution(
            location=filename,
            metadata=InMemoryMetadata(metadata_dict, filename),
            project_name=project_name,
        )
        return cls(dist)

    @classmethod
    def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
        """Build a Distribution by reading the .dist-info files out of a wheel.

        :raises InvalidWheel: if the wheel is not a valid ZIP archive.
        :raises UnsupportedWheel: if the wheel's metadata is unusable.
        """
        try:
            with wheel.as_zipfile() as zf:
                info_dir, _ = parse_wheel(zf, name)
                # Strip the "<info_dir>/" prefix so keys are bare file names.
                metadata_dict = {
                    path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path)
                    for path in zf.namelist()
                    if path.startswith(f"{info_dir}/")
                }
        except zipfile.BadZipFile as e:
            raise InvalidWheel(wheel.location, name) from e
        except UnsupportedWheel as e:
            raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
        dist = pkg_resources.DistInfoDistribution(
            location=wheel.location,
            metadata=InMemoryMetadata(metadata_dict, wheel.location),
            project_name=name,
        )
        return cls(dist)

    @property
    def location(self) -> Optional[str]:
        return self._dist.location

    @property
    def installed_location(self) -> Optional[str]:
        # An egg-link (develop install) takes precedence over the location
        # recorded by pkg_resources.
        egg_link = egg_link_path_from_location(self.raw_name)
        if egg_link:
            location = egg_link
        elif self.location:
            location = self.location
        else:
            return None
        return normalize_path(location)

    @property
    def info_location(self) -> Optional[str]:
        return self._dist.egg_info

    @property
    def installed_by_distutils(self) -> bool:
        # A distutils-installed distribution is provided by FileMetadata. This
        # provider has a "path" attribute not present anywhere else. Not the
        # best introspection logic, but pip has been doing this for a long time.
        try:
            return bool(self._dist._provider.path)
        except AttributeError:
            return False

    @property
    def canonical_name(self) -> NormalizedName:
        return canonicalize_name(self._dist.project_name)

    @property
    def version(self) -> DistributionVersion:
        return parse_version(self._dist.version)

    def is_file(self, path: InfoPath) -> bool:
        return self._dist.has_metadata(str(path))

    def iter_distutils_script_names(self) -> Iterator[str]:
        yield from self._dist.metadata_listdir("scripts")

    def read_text(self, path: InfoPath) -> str:
        """Return the text of a metadata file.

        :raises FileNotFoundError: if the file does not exist.
        :raises NoneMetadataError: if the provider reports the file exists but
            returns None for its content.
        """
        name = str(path)
        if not self._dist.has_metadata(name):
            raise FileNotFoundError(name)
        content = self._dist.get_metadata(name)
        if content is None:
            raise NoneMetadataError(self, name)
        return content

    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
        """Yield entry points, parsing "name = value" from each definition."""
        for group, entries in self._dist.get_entry_map().items():
            for name, entry_point in entries.items():
                name, _, value = str(entry_point).partition("=")
                yield EntryPoint(name=name.strip(), value=value.strip(), group=group)

    def _metadata_impl(self) -> email.message.Message:
        """
        :raises NoneMetadataError: if the distribution reports `has_metadata()`
            True but `get_metadata()` returns None.
        """
        # dist-info installs carry METADATA; egg-info installs carry PKG-INFO.
        if isinstance(self._dist, pkg_resources.DistInfoDistribution):
            metadata_name = "METADATA"
        else:
            metadata_name = "PKG-INFO"
        try:
            metadata = self.read_text(metadata_name)
        except FileNotFoundError:
            if self.location:
                displaying_path = display_path(self.location)
            else:
                displaying_path = repr(self.location)
            logger.warning("No metadata found in %s", displaying_path)
            metadata = ""
        feed_parser = email.parser.FeedParser()
        feed_parser.feed(metadata)
        return feed_parser.close()

    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
        if extras:  # pkg_resources raises on invalid extras, so we sanitize.
            extras = frozenset(pkg_resources.safe_extra(e) for e in extras)
            extras = extras.intersection(self._dist.extras)
        return self._dist.requires(extras)

    def iter_provided_extras(self) -> Iterable[str]:
        return self._dist.extras

    def is_extra_provided(self, extra: str) -> bool:
        return pkg_resources.safe_extra(extra) in self._dist.extras
228
+
229
+
230
class Environment(BaseEnvironment):
    """BaseEnvironment implementation backed by a pkg_resources WorkingSet."""

    def __init__(self, ws: pkg_resources.WorkingSet) -> None:
        self._ws = ws

    @classmethod
    def default(cls) -> BaseEnvironment:
        # pkg_resources.working_set is the process-global working set.
        return cls(pkg_resources.working_set)

    @classmethod
    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
        return cls(pkg_resources.WorkingSet(paths))

    def _iter_distributions(self) -> Iterator[BaseDistribution]:
        for dist in self._ws:
            yield Distribution(dist)

    def _search_distribution(self, name: str) -> Optional[BaseDistribution]:
        """Find a distribution matching the ``name`` in the environment.

        This searches from *all* distributions available in the environment, to
        match the behavior of ``pkg_resources.get_distribution()``.
        """
        canonical_name = canonicalize_name(name)
        for dist in self.iter_all_distributions():
            if dist.canonical_name == canonical_name:
                return dist
        return None

    def get_distribution(self, name: str) -> Optional[BaseDistribution]:
        # Search the distribution by looking through the working set.
        dist = self._search_distribution(name)
        if dist:
            return dist

        # If distribution could not be found, call working_set.require to
        # update the working set, and try to find the distribution again.
        # This might happen for e.g. when you install a package twice, once
        # using setup.py develop and again using setup.py install. Now when
        # running pip uninstall twice, the package gets removed from the
        # working set in the first uninstall, so we have to populate the
        # working set again so that pip knows about it and the packages gets
        # picked up and is successfully uninstalled the second time too.
        try:
            # We didn't pass in any version specifiers, so this can never
            # raise pkg_resources.VersionConflict.
            self._ws.require(name)
        except pkg_resources.DistributionNotFound:
            return None
        return self._search_distribution(name)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-38.pyc ADDED
Binary file (7.65 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-38.pyc ADDED
Binary file (1.73 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-38.pyc ADDED
Binary file (1.66 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/models/__pycache__/target_python.cpython-38.pyc ADDED
Binary file (3.77 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (155 Bytes). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/check.cpython-38.pyc ADDED
Binary file (5.15 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-38.pyc ADDED
Binary file (6.12 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-38.pyc ADDED
Binary file (15.5 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/__init__.py ADDED
File without changes
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-38.pyc ADDED
Binary file (1.16 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/build_tracker.py ADDED
@@ -0,0 +1,139 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+ import hashlib
3
+ import logging
4
+ import os
5
+ from types import TracebackType
6
+ from typing import Dict, Generator, Optional, Set, Type, Union
7
+
8
+ from pip._internal.models.link import Link
9
+ from pip._internal.req.req_install import InstallRequirement
10
+ from pip._internal.utils.temp_dir import TempDirectory
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
@contextlib.contextmanager
def update_env_context_manager(**changes: str) -> Generator[None, None, None]:
    """Temporarily apply *changes* to os.environ, restoring originals on exit."""
    target = os.environ

    # Remember the previous value of every key we touch; a unique sentinel
    # marks keys that did not exist beforehand.
    missing = object()
    previous: Dict[str, Union[object, str]] = {
        name: target.get(name, missing) for name in changes
    }
    target.update(changes)

    try:
        yield
    finally:
        # Undo every change, deleting keys that we introduced.
        for name, old_value in previous.items():
            if old_value is missing:
                del target[name]
            else:
                assert isinstance(old_value, str)  # for mypy
                target[name] = old_value
39
+
40
+
41
@contextlib.contextmanager
def get_build_tracker() -> Generator["BuildTracker", None, None]:
    """Yield a BuildTracker rooted at $PIP_BUILD_TRACKER.

    If the variable is unset (i.e. we are the outermost pip process), a
    temporary root is created and exported so that any pip subprocesses
    building setup requirements share the same tracker directory.
    """
    root = os.environ.get("PIP_BUILD_TRACKER")
    with contextlib.ExitStack() as ctx:
        if root is None:
            root = ctx.enter_context(TempDirectory(kind="build-tracker")).path
            ctx.enter_context(update_env_context_manager(PIP_BUILD_TRACKER=root))
            logger.debug("Initialized build tracking at %s", root)

        with BuildTracker(root) as tracker:
            yield tracker
52
+
53
+
54
class TrackerId(str):
    """Uniquely identifying string provided to the build tracker."""
56
+
57
+
58
class BuildTracker:
    """Ensure that an sdist cannot request itself as a setup requirement.

    When an sdist is prepared, it identifies its setup requirements in the
    context of ``BuildTracker.track()``. If a requirement shows up recursively, this
    raises an exception.

    This stops fork bombs embedded in malicious packages."""

    def __init__(self, root: str) -> None:
        # Directory shared (via $PIP_BUILD_TRACKER) with pip subprocesses, so
        # in-progress builds are visible across processes.
        self._root = root
        self._entries: Dict[TrackerId, InstallRequirement] = {}
        logger.debug("Created build tracker: %s", self._root)

    def __enter__(self) -> "BuildTracker":
        logger.debug("Entered build tracker: %s", self._root)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Remove any leftover entries, including those left behind when a
        # tracked build raised out of track().
        self.cleanup()

    def _entry_path(self, key: TrackerId) -> str:
        """Return the marker-file path for *key* (hashed to a safe filename)."""
        hashed = hashlib.sha224(key.encode()).hexdigest()
        return os.path.join(self._root, hashed)

    def add(self, req: InstallRequirement, key: TrackerId) -> None:
        """Add an InstallRequirement to build tracking.

        :raises LookupError: if a build for *key* is already in progress.
        """

        # Get the file to write information about this requirement.
        entry_path = self._entry_path(key)

        # Try reading from the file. If it exists and can be read from, a build
        # is already in progress, so a LookupError is raised.
        try:
            with open(entry_path) as fp:
                contents = fp.read()
        except FileNotFoundError:
            pass
        else:
            message = "{} is already being built: {}".format(req.link, contents)
            raise LookupError(message)

        # If we're here, req should really not be building already.
        assert key not in self._entries

        # Start tracking this requirement.
        with open(entry_path, "w", encoding="utf-8") as fp:
            fp.write(str(req))
        self._entries[key] = req

        logger.debug("Added %s to build tracker %r", req, self._root)

    def remove(self, req: InstallRequirement, key: TrackerId) -> None:
        """Remove an InstallRequirement from build tracking."""

        # Delete the created file and the corresponding entry.
        os.unlink(self._entry_path(key))
        del self._entries[key]

        logger.debug("Removed %s from build tracker %r", req, self._root)

    def cleanup(self) -> None:
        """Remove every entry this tracker instance added."""
        for key, req in list(self._entries.items()):
            self.remove(req, key)

        logger.debug("Removed build tracker: %r", self._root)

    @contextlib.contextmanager
    def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]:
        """Ensure that `key` cannot install itself as a setup requirement.

        :raises LookupError: If `key` was already provided in a parent invocation of
            the context introduced by this method."""
        tracker_id = TrackerId(key)
        self.add(req, tracker_id)
        # NOTE: intentionally no try/finally here — if the build raises, the
        # entry remains and is cleaned up by cleanup() on tracker exit.
        yield
        self.remove(req, tracker_id)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/metadata.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Metadata generation logic for source distributions.
2
+ """
3
+
4
+ import os
5
+
6
+ from pip._vendor.pyproject_hooks import BuildBackendHookCaller
7
+
8
+ from pip._internal.build_env import BuildEnvironment
9
+ from pip._internal.exceptions import (
10
+ InstallationSubprocessError,
11
+ MetadataGenerationFailed,
12
+ )
13
+ from pip._internal.utils.subprocess import runner_with_spinner_message
14
+ from pip._internal.utils.temp_dir import TempDirectory
15
+
16
+
17
def generate_metadata(
    build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 517.

    :param build_env: isolated build environment to run the backend in.
    :param backend: hook caller for the project's build backend.
    :param details: human-readable package description used in error messages.
    :returns: path of the generated .dist-info directory.
    :raises MetadataGenerationFailed: if the backend hook subprocess fails.
    """
    # globally_managed keeps the directory alive for the rest of the run.
    metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
    metadata_dir = metadata_tmpdir.path

    spinner_runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")
    with build_env:
        # Note that BuildBackendHookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so we don't have to
        # consider the possibility that this hook doesn't exist.
        with backend.subprocess_runner(spinner_runner):
            try:
                distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    return os.path.join(metadata_dir, distinfo_dir)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/metadata_editable.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Metadata generation logic for PEP 660 editable installs.
2
+ """
3
+
4
+ import os
5
+
6
+ from pip._vendor.pyproject_hooks import BuildBackendHookCaller
7
+
8
+ from pip._internal.build_env import BuildEnvironment
9
+ from pip._internal.exceptions import (
10
+ InstallationSubprocessError,
11
+ MetadataGenerationFailed,
12
+ )
13
+ from pip._internal.utils.subprocess import runner_with_spinner_message
14
+ from pip._internal.utils.temp_dir import TempDirectory
15
+
16
+
17
def generate_editable_metadata(
    build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
) -> str:
    """Generate metadata using mechanisms described in PEP 660.

    :param build_env: isolated build environment to run the backend in.
    :param backend: hook caller for the project's build backend.
    :param details: human-readable package description used in error messages.
    :returns: path of the generated .dist-info directory.
    :raises MetadataGenerationFailed: if the backend hook subprocess fails.
    """
    # globally_managed keeps the directory alive for the rest of the run.
    metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
    metadata_dir = metadata_tmpdir.path

    spinner_runner = runner_with_spinner_message(
        "Preparing editable metadata (pyproject.toml)"
    )
    with build_env:
        # Note that BuildBackendHookCaller implements a fallback for
        # prepare_metadata_for_build_wheel/editable, so we don't have to
        # consider the possibility that this hook doesn't exist.
        with backend.subprocess_runner(spinner_runner):
            try:
                distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir)
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    return os.path.join(metadata_dir, distinfo_dir)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/metadata_legacy.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Metadata generation logic for legacy source distributions.
2
+ """
3
+
4
+ import logging
5
+ import os
6
+
7
+ from pip._internal.build_env import BuildEnvironment
8
+ from pip._internal.cli.spinners import open_spinner
9
+ from pip._internal.exceptions import (
10
+ InstallationError,
11
+ InstallationSubprocessError,
12
+ MetadataGenerationFailed,
13
+ )
14
+ from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
15
+ from pip._internal.utils.subprocess import call_subprocess
16
+ from pip._internal.utils.temp_dir import TempDirectory
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
+ def _find_egg_info(directory: str) -> str:
22
+ """Find an .egg-info subdirectory in `directory`."""
23
+ filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")]
24
+
25
+ if not filenames:
26
+ raise InstallationError(f"No .egg-info directory found in {directory}")
27
+
28
+ if len(filenames) > 1:
29
+ raise InstallationError(
30
+ "More than one .egg-info directory found in {}".format(directory)
31
+ )
32
+
33
+ return os.path.join(directory, filenames[0])
34
+
35
+
36
def generate_metadata(
    build_env: BuildEnvironment,
    setup_py_path: str,
    source_dir: str,
    isolated: bool,
    details: str,
) -> str:
    """Generate metadata using setup.py-based defacto mechanisms.

    Runs ``setup.py egg_info`` in *source_dir* inside *build_env* and returns
    the path of the generated .egg-info directory.

    :param isolated: when True, user config is suppressed for the subprocess.
    :param details: human-readable package description used in log/errors.
    :raises MetadataGenerationFailed: if the egg_info subprocess fails.
    """
    logger.debug(
        "Running setup.py (path:%s) egg_info for package %s",
        setup_py_path,
        details,
    )

    # globally_managed keeps the directory alive for the rest of the run.
    egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path

    args = make_setuptools_egg_info_args(
        setup_py_path,
        egg_info_dir=egg_info_dir,
        no_user_config=isolated,
    )

    with build_env:
        with open_spinner("Preparing metadata (setup.py)") as spinner:
            try:
                call_subprocess(
                    args,
                    cwd=source_dir,
                    command_desc="python setup.py egg_info",
                    spinner=spinner,
                )
            except InstallationSubprocessError as error:
                raise MetadataGenerationFailed(package_details=details) from error

    # Return the .egg-info directory.
    return _find_egg_info(egg_info_dir)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/wheel.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ from typing import Optional
4
+
5
+ from pip._vendor.pyproject_hooks import BuildBackendHookCaller
6
+
7
+ from pip._internal.utils.subprocess import runner_with_spinner_message
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
def build_wheel_pep517(
    name: str,
    backend: BuildBackendHookCaller,
    metadata_directory: str,
    tempd: str,
) -> Optional[str]:
    """Build one InstallRequirement using the PEP 517 build process.

    Returns path to wheel if successfully built. Otherwise, returns None.
    """
    assert metadata_directory is not None
    try:
        logger.debug("Destination directory: %s", tempd)

        runner = runner_with_spinner_message(
            f"Building wheel for {name} (pyproject.toml)"
        )
        with backend.subprocess_runner(runner):
            # Reuse the already-generated metadata instead of regenerating it.
            wheel_name = backend.build_wheel(
                tempd,
                metadata_directory=metadata_directory,
            )
    except Exception:
        # Deliberately broad: any backend failure means "no wheel", and the
        # caller decides how to proceed.
        logger.error("Failed building wheel for %s", name)
        return None
    return os.path.join(tempd, wheel_name)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/wheel_editable.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ from typing import Optional
4
+
5
+ from pip._vendor.pyproject_hooks import BuildBackendHookCaller, HookMissing
6
+
7
+ from pip._internal.utils.subprocess import runner_with_spinner_message
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
def build_wheel_editable(
    name: str,
    backend: "BuildBackendHookCaller",
    metadata_directory: str,
    tempd: str,
) -> Optional[str]:
    """Build one InstallRequirement using the PEP 660 build process.

    :param name: project name, used only for log/spinner messages.
    :param backend: the PEP 517/660 build-backend hook caller.
    :param metadata_directory: previously-prepared .dist-info directory.
    :param tempd: directory the backend writes the wheel into.
    :return: path to the built editable wheel, or None on failure.
    """
    assert metadata_directory is not None
    try:
        logger.debug("Destination directory: %s", tempd)

        message_runner = runner_with_spinner_message(
            f"Building editable for {name} (pyproject.toml)"
        )
        with backend.subprocess_runner(message_runner):
            try:
                built_name = backend.build_editable(
                    tempd, metadata_directory=metadata_directory
                )
            except HookMissing as e:
                # PEP 660 hooks are optional; report which one is missing.
                logger.error(
                    "Cannot build editable %s because the build "
                    "backend does not have the %s hook",
                    name,
                    e,
                )
                return None
    except Exception:
        logger.error("Failed building editable for %s", name)
        return None
    return os.path.join(tempd, built_name)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/build/wheel_legacy.py ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os.path
3
+ from typing import List, Optional
4
+
5
+ from pip._internal.cli.spinners import open_spinner
6
+ from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
7
+ from pip._internal.utils.subprocess import call_subprocess, format_command_args
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
def format_command_result(
    command_args: List[str],
    command_output: str,
) -> str:
    """Format command information for logging.

    Returns a "Command arguments: ..." line followed by the command
    output — elided unless the logger is at DEBUG level.
    """
    pieces = [f"Command arguments: {format_command_args(command_args)}\n"]

    if not command_output:
        pieces.append("Command output: None")
    elif logger.getEffectiveLevel() > logging.DEBUG:
        # Full output is only shown in verbose mode.
        pieces.append("Command output: [use --verbose to show]")
    else:
        output = command_output
        if not output.endswith("\n"):
            output += "\n"
        pieces.append(f"Command output:\n{output}")

    return "".join(pieces)
30
+
31
+
32
+ def get_legacy_build_wheel_path(
33
+ names: List[str],
34
+ temp_dir: str,
35
+ name: str,
36
+ command_args: List[str],
37
+ command_output: str,
38
+ ) -> Optional[str]:
39
+ """Return the path to the wheel in the temporary build directory."""
40
+ # Sort for determinism.
41
+ names = sorted(names)
42
+ if not names:
43
+ msg = ("Legacy build of wheel for {!r} created no files.\n").format(name)
44
+ msg += format_command_result(command_args, command_output)
45
+ logger.warning(msg)
46
+ return None
47
+
48
+ if len(names) > 1:
49
+ msg = (
50
+ "Legacy build of wheel for {!r} created more than one file.\n"
51
+ "Filenames (choosing first): {}\n"
52
+ ).format(name, names)
53
+ msg += format_command_result(command_args, command_output)
54
+ logger.warning(msg)
55
+
56
+ return os.path.join(temp_dir, names[0])
57
+
58
+
59
def build_wheel_legacy(
    name: str,
    setup_py_path: str,
    source_dir: str,
    global_options: List[str],
    build_options: List[str],
    tempd: str,
) -> Optional[str]:
    """Build one unpacked package using the "legacy" build process.

    Runs ``setup.py bdist_wheel`` in a subprocess.

    :param name: project name, used for messages only.
    :param tempd: destination directory for the built wheel.
    :return: path to wheel if successfully built, otherwise None.
    """
    bdist_args = make_setuptools_bdist_wheel_args(
        setup_py_path,
        global_options=global_options,
        build_options=build_options,
        destination_dir=tempd,
    )

    with open_spinner(f"Building wheel for {name} (setup.py)") as spinner:
        logger.debug("Destination directory: %s", tempd)

        try:
            output = call_subprocess(
                bdist_args,
                command_desc="python setup.py bdist_wheel",
                cwd=source_dir,
                spinner=spinner,
            )
        except Exception:
            spinner.finish("error")
            logger.error("Failed building wheel for %s", name)
            return None

        # Locate whatever file(s) the build dropped into tempd.
        return get_legacy_build_wheel_path(
            names=os.listdir(tempd),
            temp_dir=tempd,
            name=name,
            command_args=bdist_args,
            command_output=output,
        )
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/install/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ """For modules related to installing packages.
2
+ """
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (219 Bytes). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-38.pyc ADDED
Binary file (1.35 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-38.pyc ADDED
Binary file (20.9 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/install/editable_legacy.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Legacy editable installation process, i.e. `setup.py develop`.
2
+ """
3
+ import logging
4
+ from typing import Optional, Sequence
5
+
6
+ from pip._internal.build_env import BuildEnvironment
7
+ from pip._internal.utils.logging import indent_log
8
+ from pip._internal.utils.setuptools_build import make_setuptools_develop_args
9
+ from pip._internal.utils.subprocess import call_subprocess
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
def install_editable(
    *,
    global_options: Sequence[str],
    prefix: Optional[str],
    home: Optional[str],
    use_user_site: bool,
    name: str,
    setup_py_path: str,
    isolated: bool,
    build_env: "BuildEnvironment",
    unpacked_source_directory: str,
) -> None:
    """Install a package in editable mode. Most arguments are pass-through
    to setuptools (via ``setup.py develop``).

    :param build_env: entered while the subprocess runs so the build
        dependencies are importable.
    """
    logger.info("Running setup.py develop for %s", name)

    develop_args = make_setuptools_develop_args(
        setup_py_path,
        global_options=global_options,
        no_user_config=isolated,
        prefix=prefix,
        home=home,
        use_user_site=use_user_site,
    )

    # Indent subprocess output under the "Running setup.py develop" line.
    with indent_log(), build_env:
        call_subprocess(
            develop_args,
            command_desc="python setup.py develop",
            cwd=unpacked_source_directory,
        )
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/operations/install/wheel.py ADDED
@@ -0,0 +1,734 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Support for installing and building the "wheel" binary package format.
2
+ """
3
+
4
+ import collections
5
+ import compileall
6
+ import contextlib
7
+ import csv
8
+ import importlib
9
+ import logging
10
+ import os.path
11
+ import re
12
+ import shutil
13
+ import sys
14
+ import warnings
15
+ from base64 import urlsafe_b64encode
16
+ from email.message import Message
17
+ from itertools import chain, filterfalse, starmap
18
+ from typing import (
19
+ IO,
20
+ TYPE_CHECKING,
21
+ Any,
22
+ BinaryIO,
23
+ Callable,
24
+ Dict,
25
+ Generator,
26
+ Iterable,
27
+ Iterator,
28
+ List,
29
+ NewType,
30
+ Optional,
31
+ Sequence,
32
+ Set,
33
+ Tuple,
34
+ Union,
35
+ cast,
36
+ )
37
+ from zipfile import ZipFile, ZipInfo
38
+
39
+ from pip._vendor.distlib.scripts import ScriptMaker
40
+ from pip._vendor.distlib.util import get_export_entry
41
+ from pip._vendor.packaging.utils import canonicalize_name
42
+
43
+ from pip._internal.exceptions import InstallationError
44
+ from pip._internal.locations import get_major_minor_version
45
+ from pip._internal.metadata import (
46
+ BaseDistribution,
47
+ FilesystemWheel,
48
+ get_wheel_distribution,
49
+ )
50
+ from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
51
+ from pip._internal.models.scheme import SCHEME_KEYS, Scheme
52
+ from pip._internal.utils.filesystem import adjacent_tmp_file, replace
53
+ from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file, partition
54
+ from pip._internal.utils.unpacking import (
55
+ current_umask,
56
+ is_within_directory,
57
+ set_extracted_file_to_default_mode_plus_executable,
58
+ zip_item_is_executable,
59
+ )
60
+ from pip._internal.utils.wheel import parse_wheel
61
+
62
+ if TYPE_CHECKING:
63
+ from typing import Protocol
64
+
65
+ class File(Protocol):
66
+ src_record_path: "RecordPath"
67
+ dest_path: str
68
+ changed: bool
69
+
70
+ def save(self) -> None:
71
+ pass
72
+
73
+
74
+ logger = logging.getLogger(__name__)
75
+
76
+ RecordPath = NewType("RecordPath", str)
77
+ InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
78
+
79
+
80
def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]:
    """Return (encoded_digest, length) for path using hashlib.sha256().

    The digest uses the RECORD-file encoding: ``sha256=`` followed by the
    urlsafe base64 digest with the ``=`` padding stripped (PEP 376/427).
    """
    hasher, length = hash_file(path, blocksize)
    encoded = urlsafe_b64encode(hasher.digest()).decode("latin1").rstrip("=")
    return ("sha256=" + encoded, str(length))
85
+
86
+
87
def csv_io_kwargs(mode: str) -> Dict[str, Any]:
    """Return keyword arguments to properly open a CSV file in *mode*.

    ``newline=""`` is required by the csv module; RECORD files are UTF-8.
    """
    return dict(mode=mode, newline="", encoding="utf-8")
92
+
93
+
94
def fix_script(path: str) -> bool:
    """Replace a ``#!python`` shebang with ``#!/path/to/python``.

    :param path: an existing file; rewritten in place when it starts
        with the ``#!python`` placeholder shebang.
    :return: True if the file was changed, False otherwise.
    """
    # XXX RECORD hashes will need to be updated
    assert os.path.isfile(path)

    with open(path, "rb") as handle:
        shebang = handle.readline()
        if not shebang.startswith(b"#!python"):
            return False
        interpreter = sys.executable.encode(sys.getfilesystemencoding())
        new_shebang = b"#!" + interpreter + os.linesep.encode("ascii")
        remainder = handle.read()
    with open(path, "wb") as handle:
        handle.write(new_shebang + remainder)
    return True
112
+
113
+
114
def wheel_root_is_purelib(metadata: Message) -> bool:
    """True when the WHEEL metadata marks the archive root as purelib."""
    value = metadata.get("Root-Is-Purelib", "")
    return value.lower() == "true"
116
+
117
+
118
def get_entrypoints(dist: "BaseDistribution") -> Tuple[Dict[str, str], Dict[str, str]]:
    """Return (console_scripts, gui_scripts) maps of name -> entry value.

    Entry points in any other group are ignored.
    """
    # Materialize once: iter_entry_points may return a one-shot iterator.
    entry_points = list(dist.iter_entry_points())
    console = {
        ep.name: ep.value for ep in entry_points if ep.group == "console_scripts"
    }
    gui = {ep.name: ep.value for ep in entry_points if ep.group == "gui_scripts"}
    return console, gui
127
+
128
+
129
def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
    """Determine if any scripts are not on PATH and format a warning.

    :param scripts: absolute paths of installed script files.
    :return: a multi-line warning message if one or more scripts landed
        in a directory that is not on PATH, otherwise None.
    """
    if not scripts:
        return None

    # Group installed script names by their parent directory.
    grouped_by_dir: Dict[str, Set[str]] = collections.defaultdict(set)
    for destfile in scripts:
        grouped_by_dir[os.path.dirname(destfile)].add(os.path.basename(destfile))

    path_entries = os.environ.get("PATH", "").split(os.pathsep)
    not_warn_dirs = [
        os.path.normcase(os.path.normpath(entry)).rstrip(os.sep)
        for entry in path_entries
    ]
    # An executable next to sys.executable is fine too: this covers venv
    # invocations without activating the venv.
    not_warn_dirs.append(
        os.path.normcase(os.path.normpath(os.path.dirname(sys.executable)))
    )
    warn_for: Dict[str, Set[str]] = {
        parent: names
        for parent, names in grouped_by_dir.items()
        if os.path.normcase(os.path.normpath(parent)) not in not_warn_dirs
    }
    if not warn_for:
        return None

    # One line per offending directory.
    msg_lines = []
    for parent, names in warn_for.items():
        ordered: List[str] = sorted(names)
        if len(ordered) == 1:
            start_text = f"script {ordered[0]} is"
        else:
            start_text = "scripts {} are".format(
                ", ".join(ordered[:-1]) + " and " + ordered[-1]
            )

        msg_lines.append(
            f"The {start_text} installed in '{parent}' which is not on PATH."
        )

    last_line_fmt = (
        "Consider adding {} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )
    if len(msg_lines) == 1:
        msg_lines.append(last_line_fmt.format("this directory"))
    else:
        msg_lines.append(last_line_fmt.format("these directories"))

    # A `~` in PATH is often not expanded by the consuming application.
    if any(entry[0] == "~" for entry in path_entries if entry):
        msg_lines.append(
            "NOTE: The current PATH contains path(s) starting with `~`, "
            "which may not be expanded by all applications."
        )

    return "\n".join(msg_lines)
199
+
200
+
201
+ def _normalized_outrows(
202
+ outrows: Iterable[InstalledCSVRow],
203
+ ) -> List[Tuple[str, str, str]]:
204
+ """Normalize the given rows of a RECORD file.
205
+
206
+ Items in each row are converted into str. Rows are then sorted to make
207
+ the value more predictable for tests.
208
+
209
+ Each row is a 3-tuple (path, hash, size) and corresponds to a record of
210
+ a RECORD file (see PEP 376 and PEP 427 for details). For the rows
211
+ passed to this function, the size can be an integer as an int or string,
212
+ or the empty string.
213
+ """
214
+ # Normally, there should only be one row per path, in which case the
215
+ # second and third elements don't come into play when sorting.
216
+ # However, in cases in the wild where a path might happen to occur twice,
217
+ # we don't want the sort operation to trigger an error (but still want
218
+ # determinism). Since the third element can be an int or string, we
219
+ # coerce each element to a string to avoid a TypeError in this case.
220
+ # For additional background, see--
221
+ # https://github.com/pypa/pip/issues/5868
222
+ return sorted(
223
+ (record_path, hash_, str(size)) for record_path, hash_, size in outrows
224
+ )
225
+
226
+
227
+ def _record_to_fs_path(record_path: RecordPath, lib_dir: str) -> str:
228
+ return os.path.join(lib_dir, record_path)
229
+
230
+
231
+ def _fs_to_record_path(path: str, lib_dir: str) -> RecordPath:
232
+ # On Windows, do not handle relative paths if they belong to different
233
+ # logical disks
234
+ if os.path.splitdrive(path)[0].lower() == os.path.splitdrive(lib_dir)[0].lower():
235
+ path = os.path.relpath(path, lib_dir)
236
+
237
+ path = path.replace(os.path.sep, "/")
238
+ return cast("RecordPath", path)
239
+
240
+
241
def get_csv_rows_for_installed(
    old_csv_rows: List[List[str]],
    installed: Dict["RecordPath", "RecordPath"],
    changed: Set["RecordPath"],
    generated: List[str],
    lib_dir: str,
) -> List["InstalledCSVRow"]:
    """Build the rows for the installed distribution's RECORD file.

    :param old_csv_rows: rows read from the wheel's RECORD.
    :param installed: a map from archive RECORD path to installation
        RECORD path (entries are consumed as they are matched).
    :param changed: installation paths whose contents were modified, so
        their hash/size must be recomputed.
    :param generated: filesystem paths of files created during install.
    """
    installed_rows: List["InstalledCSVRow"] = []
    for row in old_csv_rows:
        if len(row) > 3:
            logger.warning("RECORD line has more than three elements: %s", row)
        old_record_path = cast("RecordPath", row[0])
        new_record_path = installed.pop(old_record_path, old_record_path)
        if new_record_path in changed:
            # File was rewritten during install — recompute hash and size.
            digest, length = rehash(_record_to_fs_path(new_record_path, lib_dir))
        else:
            digest = row[1] if len(row) > 1 else ""
            length = row[2] if len(row) > 2 else ""
        installed_rows.append((new_record_path, digest, length))

    for generated_path in generated:
        digest, length = rehash(generated_path)
        installed_rows.append(
            (_fs_to_record_path(generated_path, lib_dir), digest, length)
        )

    # Anything still in `installed` had no RECORD row; emit it without
    # hash/size information.
    installed_rows.extend((leftover, "", "") for leftover in installed.values())
    return installed_rows
271
+
272
+
273
def get_console_script_specs(console: Dict[str, str]) -> List[str]:
    """Given the mapping from entrypoint name to callable, return the
    relevant console script specs (``"name = module:attr"`` strings).
    """
    # Work on a copy so the caller's mapping is untouched.
    console = console.copy()

    specs: List[str] = []

    # Special-case pip and setuptools: their wheels bake *versioned* entry
    # points (pipX, pipX.Y, easy_install-X.Y) in at build time, which are
    # wrong when the wheel is installed under a different Python. Until
    # metadata can express versioned entry points, drop the baked-in ones
    # and regenerate them for the installing interpreter.
    #
    # The ENSUREPIP_OPTIONS environment variable (set by ensurepip)
    # controls which variants are produced:
    #   altinstall -> only pipX.Y and easy_install-X.Y
    #   install    -> pipX.Y and pipX (any value other than altinstall)
    #   unset      -> pip, pipX and pipX.Y (plus easy_install variants)
    pip_script = console.pop("pip", None)
    if pip_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            specs.append("pip = " + pip_script)

        if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
            specs.append(f"pip{sys.version_info[0]} = {pip_script}")

        specs.append(f"pip{get_major_minor_version()} = {pip_script}")
        # Drop any other versioned pip entry points from the wheel.
        for key in [k for k in console if re.match(r"pip(\d+(\.\d+)?)?$", k)]:
            del console[key]

    easy_install_script = console.pop("easy_install", None)
    if easy_install_script:
        if "ENSUREPIP_OPTIONS" not in os.environ:
            specs.append("easy_install = " + easy_install_script)

        specs.append(
            f"easy_install-{get_major_minor_version()} = {easy_install_script}"
        )
        # Drop any other versioned easy_install entry points.
        for key in [
            k for k in console if re.match(r"easy_install(-\d+\.\d+)?$", k)
        ]:
            del console[key]

    # Everything else is generated exactly as declared in the wheel.
    specs.extend(f"{entry_name} = {target}" for entry_name, target in console.items())

    return specs
348
+
349
+
350
class ZipBackedFile:
    """A wheel file to be installed, backed by an entry in the wheel zip."""

    def __init__(
        self, src_record_path: "RecordPath", dest_path: str, zip_file: ZipFile
    ) -> None:
        # src_record_path: archive member name (RECORD-style, '/'-separated)
        # dest_path: filesystem path the member is extracted to
        # changed: always False here; ScriptFile sets it after shebang fixes
        self.src_record_path = src_record_path
        self.dest_path = dest_path
        self._zip_file = zip_file
        self.changed = False

    def _getinfo(self) -> ZipInfo:
        """Look up this file's ZipInfo entry in the backing archive."""
        return self._zip_file.getinfo(self.src_record_path)

    def save(self) -> None:
        """Extract the member to dest_path, preserving executability."""
        # Directory creation is deliberately lazy and happens after file
        # filtering, so we never install empty dirs (they can't be
        # uninstalled).
        ensure_dir(os.path.dirname(self.dest_path))

        # Unlink rather than truncate any existing file: pip may have
        # mmap'd a shared object from the old copy (e.g. pyopenssl via its
        # vendored urllib3); truncating it in place could segfault the
        # running process, while unlinking lets it keep the old mapping.
        if os.path.exists(self.dest_path):
            os.unlink(self.dest_path)

        member_info = self._getinfo()

        with self._zip_file.open(member_info) as member:
            with open(self.dest_path, "wb") as target:
                shutil.copyfileobj(member, target)

        if zip_item_is_executable(member_info):
            set_extracted_file_to_default_mode_plus_executable(self.dest_path)
388
+
389
+
390
class ScriptFile:
    """Wrap another File, rewriting its '#!python' shebang after saving."""

    def __init__(self, file: "File") -> None:
        # Mirror the wrapped file's identity so this object satisfies the
        # same File protocol.
        self._file = file
        self.src_record_path = self._file.src_record_path
        self.dest_path = self._file.dest_path
        self.changed = False

    def save(self) -> None:
        """Save the wrapped file, then fix its shebang line if needed."""
        self._file.save()
        # fix_script reports whether the file content was altered, which
        # the RECORD writer uses to know it must rehash this file.
        self.changed = fix_script(self.dest_path)
400
+
401
+
402
class MissingCallableSuffix(InstallationError):
    """Raised for a script entry point spec that lacks a callable suffix."""

    def __init__(self, entry_point: str) -> None:
        message = (
            f"Invalid script entry point: {entry_point} - A callable "
            "suffix is required. Cf https://packaging.python.org/"
            "specifications/entry-points/#use-for-scripts for more "
            "information."
        )
        super().__init__(message)
410
+
411
+
412
def _raise_for_invalid_entrypoint(specification: str) -> None:
    """Reject entry point specs that parse but have no callable suffix.

    :raises MissingCallableSuffix: when the spec names a module without
        a ':callable' part.
    """
    entry = get_export_entry(specification)
    if entry is None:
        return
    if entry.suffix is None:
        raise MissingCallableSuffix(str(entry))
416
+
417
+
418
class PipScriptMaker(ScriptMaker):
    """A ScriptMaker that validates entry point specs before generating."""

    def make(
        self, specification: str, options: Optional[Dict[str, Any]] = None
    ) -> List[str]:
        # Fail early with a pip-specific error instead of letting distlib
        # emit a confusing one for a spec with no callable suffix.
        _raise_for_invalid_entrypoint(specification)
        return super().make(specification, options)
424
+
425
+
426
+ def _install_wheel(
427
+ name: str,
428
+ wheel_zip: ZipFile,
429
+ wheel_path: str,
430
+ scheme: Scheme,
431
+ pycompile: bool = True,
432
+ warn_script_location: bool = True,
433
+ direct_url: Optional[DirectUrl] = None,
434
+ requested: bool = False,
435
+ ) -> None:
436
+ """Install a wheel.
437
+
438
+ :param name: Name of the project to install
439
+ :param wheel_zip: open ZipFile for wheel being installed
440
+ :param scheme: Distutils scheme dictating the install directories
441
+ :param req_description: String used in place of the requirement, for
442
+ logging
443
+ :param pycompile: Whether to byte-compile installed Python files
444
+ :param warn_script_location: Whether to check that scripts are installed
445
+ into a directory on PATH
446
+ :raises UnsupportedWheel:
447
+ * when the directory holds an unpacked wheel with incompatible
448
+ Wheel-Version
449
+ * when the .dist-info dir does not match the wheel
450
+ """
451
+ info_dir, metadata = parse_wheel(wheel_zip, name)
452
+
453
+ if wheel_root_is_purelib(metadata):
454
+ lib_dir = scheme.purelib
455
+ else:
456
+ lib_dir = scheme.platlib
457
+
458
+ # Record details of the files moved
459
+ # installed = files copied from the wheel to the destination
460
+ # changed = files changed while installing (scripts #! line typically)
461
+ # generated = files newly generated during the install (script wrappers)
462
+ installed: Dict[RecordPath, RecordPath] = {}
463
+ changed: Set[RecordPath] = set()
464
+ generated: List[str] = []
465
+
466
+ def record_installed(
467
+ srcfile: RecordPath, destfile: str, modified: bool = False
468
+ ) -> None:
469
+ """Map archive RECORD paths to installation RECORD paths."""
470
+ newpath = _fs_to_record_path(destfile, lib_dir)
471
+ installed[srcfile] = newpath
472
+ if modified:
473
+ changed.add(newpath)
474
+
475
+ def is_dir_path(path: RecordPath) -> bool:
476
+ return path.endswith("/")
477
+
478
+ def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None:
479
+ if not is_within_directory(dest_dir_path, target_path):
480
+ message = (
481
+ "The wheel {!r} has a file {!r} trying to install"
482
+ " outside the target directory {!r}"
483
+ )
484
+ raise InstallationError(
485
+ message.format(wheel_path, target_path, dest_dir_path)
486
+ )
487
+
488
+ def root_scheme_file_maker(
489
+ zip_file: ZipFile, dest: str
490
+ ) -> Callable[[RecordPath], "File"]:
491
+ def make_root_scheme_file(record_path: RecordPath) -> "File":
492
+ normed_path = os.path.normpath(record_path)
493
+ dest_path = os.path.join(dest, normed_path)
494
+ assert_no_path_traversal(dest, dest_path)
495
+ return ZipBackedFile(record_path, dest_path, zip_file)
496
+
497
+ return make_root_scheme_file
498
+
499
+ def data_scheme_file_maker(
500
+ zip_file: ZipFile, scheme: Scheme
501
+ ) -> Callable[[RecordPath], "File"]:
502
+ scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS}
503
+
504
+ def make_data_scheme_file(record_path: RecordPath) -> "File":
505
+ normed_path = os.path.normpath(record_path)
506
+ try:
507
+ _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
508
+ except ValueError:
509
+ message = (
510
+ "Unexpected file in {}: {!r}. .data directory contents"
511
+ " should be named like: '<scheme key>/<path>'."
512
+ ).format(wheel_path, record_path)
513
+ raise InstallationError(message)
514
+
515
+ try:
516
+ scheme_path = scheme_paths[scheme_key]
517
+ except KeyError:
518
+ valid_scheme_keys = ", ".join(sorted(scheme_paths))
519
+ message = (
520
+ "Unknown scheme key used in {}: {} (for file {!r}). .data"
521
+ " directory contents should be in subdirectories named"
522
+ " with a valid scheme key ({})"
523
+ ).format(wheel_path, scheme_key, record_path, valid_scheme_keys)
524
+ raise InstallationError(message)
525
+
526
+ dest_path = os.path.join(scheme_path, dest_subpath)
527
+ assert_no_path_traversal(scheme_path, dest_path)
528
+ return ZipBackedFile(record_path, dest_path, zip_file)
529
+
530
+ return make_data_scheme_file
531
+
532
+ def is_data_scheme_path(path: RecordPath) -> bool:
533
+ return path.split("/", 1)[0].endswith(".data")
534
+
535
+ paths = cast(List[RecordPath], wheel_zip.namelist())
536
+ file_paths = filterfalse(is_dir_path, paths)
537
+ root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths)
538
+
539
+ make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir)
540
+ files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths)
541
+
542
+ def is_script_scheme_path(path: RecordPath) -> bool:
543
+ parts = path.split("/", 2)
544
+ return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts"
545
+
546
+ other_scheme_paths, script_scheme_paths = partition(
547
+ is_script_scheme_path, data_scheme_paths
548
+ )
549
+
550
+ make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme)
551
+ other_scheme_files = map(make_data_scheme_file, other_scheme_paths)
552
+ files = chain(files, other_scheme_files)
553
+
554
+ # Get the defined entry points
555
+ distribution = get_wheel_distribution(
556
+ FilesystemWheel(wheel_path),
557
+ canonicalize_name(name),
558
+ )
559
+ console, gui = get_entrypoints(distribution)
560
+
561
+ def is_entrypoint_wrapper(file: "File") -> bool:
562
+ # EP, EP.exe and EP-script.py are scripts generated for
563
+ # entry point EP by setuptools
564
+ path = file.dest_path
565
+ name = os.path.basename(path)
566
+ if name.lower().endswith(".exe"):
567
+ matchname = name[:-4]
568
+ elif name.lower().endswith("-script.py"):
569
+ matchname = name[:-10]
570
+ elif name.lower().endswith(".pya"):
571
+ matchname = name[:-4]
572
+ else:
573
+ matchname = name
574
+ # Ignore setuptools-generated scripts
575
+ return matchname in console or matchname in gui
576
+
577
+ script_scheme_files: Iterator[File] = map(
578
+ make_data_scheme_file, script_scheme_paths
579
+ )
580
+ script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files)
581
+ script_scheme_files = map(ScriptFile, script_scheme_files)
582
+ files = chain(files, script_scheme_files)
583
+
584
+ for file in files:
585
+ file.save()
586
+ record_installed(file.src_record_path, file.dest_path, file.changed)
587
+
588
+ def pyc_source_file_paths() -> Generator[str, None, None]:
589
+ # We de-duplicate installation paths, since there can be overlap (e.g.
590
+ # file in .data maps to same location as file in wheel root).
591
+ # Sorting installation paths makes it easier to reproduce and debug
592
+ # issues related to permissions on existing files.
593
+ for installed_path in sorted(set(installed.values())):
594
+ full_installed_path = os.path.join(lib_dir, installed_path)
595
+ if not os.path.isfile(full_installed_path):
596
+ continue
597
+ if not full_installed_path.endswith(".py"):
598
+ continue
599
+ yield full_installed_path
600
+
601
+ def pyc_output_path(path: str) -> str:
602
+ """Return the path the pyc file would have been written to."""
603
+ return importlib.util.cache_from_source(path)
604
+
605
+ # Compile all of the pyc files for the installed files
606
+ if pycompile:
607
+ with captured_stdout() as stdout:
608
+ with warnings.catch_warnings():
609
+ warnings.filterwarnings("ignore")
610
+ for path in pyc_source_file_paths():
611
+ success = compileall.compile_file(path, force=True, quiet=True)
612
+ if success:
613
+ pyc_path = pyc_output_path(path)
614
+ assert os.path.exists(pyc_path)
615
+ pyc_record_path = cast(
616
+ "RecordPath", pyc_path.replace(os.path.sep, "/")
617
+ )
618
+ record_installed(pyc_record_path, pyc_path)
619
+ logger.debug(stdout.getvalue())
620
+
621
+ maker = PipScriptMaker(None, scheme.scripts)
622
+
623
+ # Ensure old scripts are overwritten.
624
+ # See https://github.com/pypa/pip/issues/1800
625
+ maker.clobber = True
626
+
627
+ # Ensure we don't generate any variants for scripts because this is almost
628
+ # never what somebody wants.
629
+ # See https://bitbucket.org/pypa/distlib/issue/35/
630
+ maker.variants = {""}
631
+
632
+ # This is required because otherwise distlib creates scripts that are not
633
+ # executable.
634
+ # See https://bitbucket.org/pypa/distlib/issue/32/
635
+ maker.set_mode = True
636
+
637
+ # Generate the console and GUI entry points specified in the wheel
638
+ scripts_to_generate = get_console_script_specs(console)
639
+
640
+ gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items()))
641
+
642
+ generated_console_scripts = maker.make_multiple(scripts_to_generate)
643
+ generated.extend(generated_console_scripts)
644
+
645
+ generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True}))
646
+
647
+ if warn_script_location:
648
+ msg = message_about_scripts_not_on_PATH(generated_console_scripts)
649
+ if msg is not None:
650
+ logger.warning(msg)
651
+
652
+ generated_file_mode = 0o666 & ~current_umask()
653
+
654
+ @contextlib.contextmanager
655
+ def _generate_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
656
+ with adjacent_tmp_file(path, **kwargs) as f:
657
+ yield f
658
+ os.chmod(f.name, generated_file_mode)
659
+ replace(f.name, path)
660
+
661
+ dest_info_dir = os.path.join(lib_dir, info_dir)
662
+
663
+ # Record pip as the installer
664
+ installer_path = os.path.join(dest_info_dir, "INSTALLER")
665
+ with _generate_file(installer_path) as installer_file:
666
+ installer_file.write(b"pip\n")
667
+ generated.append(installer_path)
668
+
669
+ # Record the PEP 610 direct URL reference
670
+ if direct_url is not None:
671
+ direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
672
+ with _generate_file(direct_url_path) as direct_url_file:
673
+ direct_url_file.write(direct_url.to_json().encode("utf-8"))
674
+ generated.append(direct_url_path)
675
+
676
+ # Record the REQUESTED file
677
+ if requested:
678
+ requested_path = os.path.join(dest_info_dir, "REQUESTED")
679
+ with open(requested_path, "wb"):
680
+ pass
681
+ generated.append(requested_path)
682
+
683
+ record_text = distribution.read_text("RECORD")
684
+ record_rows = list(csv.reader(record_text.splitlines()))
685
+
686
+ rows = get_csv_rows_for_installed(
687
+ record_rows,
688
+ installed=installed,
689
+ changed=changed,
690
+ generated=generated,
691
+ lib_dir=lib_dir,
692
+ )
693
+
694
+ # Record details of all files installed
695
+ record_path = os.path.join(dest_info_dir, "RECORD")
696
+
697
+ with _generate_file(record_path, **csv_io_kwargs("w")) as record_file:
698
+ # Explicitly cast to typing.IO[str] as a workaround for the mypy error:
699
+ # "writer" has incompatible type "BinaryIO"; expected "_Writer"
700
+ writer = csv.writer(cast("IO[str]", record_file))
701
+ writer.writerows(_normalized_outrows(rows))
702
+
703
+
704
@contextlib.contextmanager
def req_error_context(req_description: str) -> Generator[None, None, None]:
    """Re-raise any InstallationError from the wrapped body with the
    requirement description prefixed to its message, chaining the original
    exception as the cause.
    """
    try:
        yield
    except InstallationError as exc:
        augmented = f"For req: {req_description}. {exc.args[0]}"
        raise InstallationError(augmented) from exc
711
+
712
+
713
def install_wheel(
    name: str,
    wheel_path: str,
    scheme: Scheme,
    req_description: str,
    pycompile: bool = True,
    warn_script_location: bool = True,
    direct_url: Optional[DirectUrl] = None,
    requested: bool = False,
) -> None:
    """Install the wheel file at *wheel_path* into the given install *scheme*.

    Opens the wheel as a zip archive and delegates to ``_install_wheel``;
    any ``InstallationError`` raised during installation is re-raised with
    *req_description* prefixed (via ``req_error_context``).
    """
    # Single combined with-statement: open the archive and arm the
    # error-context wrapper around the actual installation work.
    with ZipFile(wheel_path, allowZip64=True) as wheel_archive, req_error_context(
        req_description
    ):
        _install_wheel(
            name=name,
            wheel_zip=wheel_archive,
            wheel_path=wheel_path,
            scheme=scheme,
            pycompile=pycompile,
            warn_script_location=warn_script_location,
            direct_url=direct_url,
            requested=requested,
        )
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/__init__.py ADDED
File without changes
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/pip/_internal/resolution/base.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Callable, List, Optional
2
+
3
+ from pip._internal.req.req_install import InstallRequirement
4
+ from pip._internal.req.req_set import RequirementSet
5
+
6
# Factory signature used by the resolvers: given a requirement string and the
# (optional) parent InstallRequirement it "comes from", build a new
# InstallRequirement for it.
InstallRequirementProvider = Callable[
    [str, Optional[InstallRequirement]], InstallRequirement
]
9
+
10
+
11
class BaseResolver:
    """Abstract interface that pip's dependency resolvers implement.

    Both methods raise ``NotImplementedError`` here; concrete subclasses
    provide the actual resolution and ordering logic.
    """

    def resolve(
        self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
    ) -> RequirementSet:
        """Resolve *root_reqs* (and their dependencies) into a RequirementSet.

        Must be overridden by subclasses.
        """
        raise NotImplementedError()

    def get_installation_order(
        self, req_set: RequirementSet
    ) -> List[InstallRequirement]:
        """Return the requirements of *req_set* in the order they should be
        installed. Must be overridden by subclasses.
        """
        raise NotImplementedError()