content
stringlengths
1
103k
path
stringlengths
8
216
filename
stringlengths
2
179
language
stringclasses
15 values
size_bytes
int64
2
189k
quality_score
float64
0.5
0.95
complexity
float64
0
1
documentation_ratio
float64
0
1
repository
stringclasses
5 values
stars
int64
0
1k
created_date
stringdate
2023-07-10 19:21:08
2025-07-09 19:11:45
license
stringclasses
4 values
is_test
bool
2 classes
file_hash
stringlengths
32
32
SUCCESS = 0\nERROR = 1\nUNKNOWN_ERROR = 2\nVIRTUALENV_NOT_FOUND = 3\nPREVIOUS_BUILD_DIR_ERROR = 4\nNO_MATCHES_FOUND = 23\n
.venv\Lib\site-packages\pip\_internal\cli\status_codes.py
status_codes.py
Python
116
0.7
0
0
node-utils
781
2024-08-14T13:19:00.898146
BSD-3-Clause
false
c28210e327c369c51dc0b66a3e5c04b7
"""Subpackage containing all of pip's command line interface related code"""\n\n# This file intentionally does not import submodules\n
.venv\Lib\site-packages\pip\_internal\cli\__init__.py
__init__.py
Python
131
0.95
0
0.5
python-kit
614
2024-03-27T06:25:00.195659
GPL-3.0
false
08e59bb9e1ebd405345e385508f8c9e6
\n\n
.venv\Lib\site-packages\pip\_internal\cli\__pycache__\autocompletion.cpython-313.pyc
autocompletion.cpython-313.pyc
Other
8,981
0.8
0.011494
0
awesome-app
236
2025-04-04T06:54:56.752104
MIT
false
82ebeab44ff2995eedecd230e0ce9810
\n\n
.venv\Lib\site-packages\pip\_internal\cli\__pycache__\base_command.cpython-313.pyc
base_command.cpython-313.pyc
Other
10,456
0.95
0.011494
0
python-kit
430
2024-02-10T16:18:12.988676
BSD-3-Clause
false
4201dcc4a59954b1d758a6bc2537cbc5
\n\n
.venv\Lib\site-packages\pip\_internal\cli\__pycache__\cmdoptions.cpython-313.pyc
cmdoptions.cpython-313.pyc
Other
31,939
0.95
0.086592
0.002915
awesome-app
946
2025-01-25T09:26:18.285371
BSD-3-Clause
false
b1ee7a7847712c01f282249f80acfdc2
\n\n
.venv\Lib\site-packages\pip\_internal\cli\__pycache__\command_context.cpython-313.pyc
command_context.cpython-313.pyc
Other
1,872
0.8
0
0
vue-tools
213
2023-07-20T04:15:57.424078
BSD-3-Clause
false
add82db1c084c93aeaaad2e1c3207ac1
\n\n
.venv\Lib\site-packages\pip\_internal\cli\__pycache__\index_command.cpython-313.pyc
index_command.cpython-313.pyc
Other
7,500
0.95
0.077922
0
node-utils
34
2024-10-28T04:23:30.012710
MIT
false
85e697e55bdd4f3951f23081aa339491
\n\n
.venv\Lib\site-packages\pip\_internal\cli\__pycache__\main.cpython-313.pyc
main.cpython-313.pyc
Other
2,299
0.8
0
0
vue-tools
580
2023-07-11T13:02:10.405582
GPL-3.0
false
c10bd3796b5042b6d6b535b05347ce79
\n\n
.venv\Lib\site-packages\pip\_internal\cli\__pycache__\main_parser.cpython-313.pyc
main_parser.cpython-313.pyc
Other
5,004
0.8
0.032787
0
node-utils
790
2024-02-24T00:04:17.644906
GPL-3.0
false
18b78d833e1714998de0958d1be7a090
\n\n
.venv\Lib\site-packages\pip\_internal\cli\__pycache__\parser.cpython-313.pyc
parser.cpython-313.pyc
Other
15,386
0.8
0.05102
0
node-utils
801
2023-10-10T22:15:50.720113
BSD-3-Clause
false
9ec0aba686ec476ef4d2f86c74893b83
\n\n
.venv\Lib\site-packages\pip\_internal\cli\__pycache__\progress_bars.cpython-313.pyc
progress_bars.cpython-313.pyc
Other
5,733
0.8
0
0
python-kit
826
2025-06-17T06:48:42.611075
BSD-3-Clause
false
249e9426c97b9f14155316eda80cc105
\n\n
.venv\Lib\site-packages\pip\_internal\cli\__pycache__\req_command.cpython-313.pyc
req_command.cpython-313.pyc
Other
12,914
0.95
0.036145
0
python-kit
170
2024-07-29T13:19:55.510390
MIT
false
6ee426e8005825d5dfac9a03bd8413bd
\n\n
.venv\Lib\site-packages\pip\_internal\cli\__pycache__\spinners.cpython-313.pyc
spinners.cpython-313.pyc
Other
8,178
0.8
0
0
awesome-app
113
2023-07-26T13:28:38.009907
GPL-3.0
false
d2c67c2589cd46cdb44d395cf139e93d
\n\n
.venv\Lib\site-packages\pip\_internal\cli\__pycache__\status_codes.cpython-313.pyc
status_codes.cpython-313.pyc
Other
377
0.7
0
0
awesome-app
825
2025-06-10T03:57:33.650563
GPL-3.0
false
9cf8f86ec41e1f630cc55674326fce27
\n\n
.venv\Lib\site-packages\pip\_internal\cli\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
277
0.7
0
0
react-lib
248
2024-08-26T12:53:46.071677
Apache-2.0
false
65a9de1e0b6ffdcee9598c51afc0af5d
import os\nimport textwrap\nfrom optparse import Values\nfrom typing import Any, List\n\nfrom pip._internal.cli.base_command import Command\nfrom pip._internal.cli.status_codes import ERROR, SUCCESS\nfrom pip._internal.exceptions import CommandError, PipError\nfrom pip._internal.utils import filesystem\nfrom pip._internal.utils.logging import getLogger\nfrom pip._internal.utils.misc import format_size\n\nlogger = getLogger(__name__)\n\n\nclass CacheCommand(Command):\n """\n Inspect and manage pip's wheel cache.\n\n Subcommands:\n\n - dir: Show the cache directory.\n - info: Show information about the cache.\n - list: List filenames of packages stored in the cache.\n - remove: Remove one or more package from the cache.\n - purge: Remove all items from the cache.\n\n ``<pattern>`` can be a glob expression or a package name.\n """\n\n ignore_require_venv = True\n usage = """\n %prog dir\n %prog info\n %prog list [<pattern>] [--format=[human, abspath]]\n %prog remove <pattern>\n %prog purge\n """\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(\n "--format",\n action="store",\n dest="list_format",\n default="human",\n choices=("human", "abspath"),\n help="Select the output format among: human (default) or abspath",\n )\n\n self.parser.insert_option_group(0, self.cmd_opts)\n\n def run(self, options: Values, args: List[str]) -> int:\n handlers = {\n "dir": self.get_cache_dir,\n "info": self.get_cache_info,\n "list": self.list_cache_items,\n "remove": self.remove_cache_items,\n "purge": self.purge_cache,\n }\n\n if not options.cache_dir:\n logger.error("pip cache commands can not function since cache is disabled.")\n return ERROR\n\n # Determine action\n if not args or args[0] not in handlers:\n logger.error(\n "Need an action (%s) to perform.",\n ", ".join(sorted(handlers)),\n )\n return ERROR\n\n action = args[0]\n\n # Error handling happens here, not in the action-handlers.\n try:\n handlers[action](options, args[1:])\n except PipError as e:\n 
logger.error(e.args[0])\n return ERROR\n\n return SUCCESS\n\n def get_cache_dir(self, options: Values, args: List[Any]) -> None:\n if args:\n raise CommandError("Too many arguments")\n\n logger.info(options.cache_dir)\n\n def get_cache_info(self, options: Values, args: List[Any]) -> None:\n if args:\n raise CommandError("Too many arguments")\n\n num_http_files = len(self._find_http_files(options))\n num_packages = len(self._find_wheels(options, "*"))\n\n http_cache_location = self._cache_dir(options, "http-v2")\n old_http_cache_location = self._cache_dir(options, "http")\n wheels_cache_location = self._cache_dir(options, "wheels")\n http_cache_size = filesystem.format_size(\n filesystem.directory_size(http_cache_location)\n + filesystem.directory_size(old_http_cache_location)\n )\n wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)\n\n message = (\n textwrap.dedent(\n """\n Package index page cache location (pip v23.3+): {http_cache_location}\n Package index page cache location (older pips): {old_http_cache_location}\n Package index page cache size: {http_cache_size}\n Number of HTTP files: {num_http_files}\n Locally built wheels location: {wheels_cache_location}\n Locally built wheels size: {wheels_cache_size}\n Number of locally built wheels: {package_count}\n """ # noqa: E501\n )\n .format(\n http_cache_location=http_cache_location,\n old_http_cache_location=old_http_cache_location,\n http_cache_size=http_cache_size,\n num_http_files=num_http_files,\n wheels_cache_location=wheels_cache_location,\n package_count=num_packages,\n wheels_cache_size=wheels_cache_size,\n )\n .strip()\n )\n\n logger.info(message)\n\n def list_cache_items(self, options: Values, args: List[Any]) -> None:\n if len(args) > 1:\n raise CommandError("Too many arguments")\n\n if args:\n pattern = args[0]\n else:\n pattern = "*"\n\n files = self._find_wheels(options, pattern)\n if options.list_format == "human":\n self.format_for_human(files)\n else:\n 
self.format_for_abspath(files)\n\n def format_for_human(self, files: List[str]) -> None:\n if not files:\n logger.info("No locally built wheels cached.")\n return\n\n results = []\n for filename in files:\n wheel = os.path.basename(filename)\n size = filesystem.format_file_size(filename)\n results.append(f" - {wheel} ({size})")\n logger.info("Cache contents:\n")\n logger.info("\n".join(sorted(results)))\n\n def format_for_abspath(self, files: List[str]) -> None:\n if files:\n logger.info("\n".join(sorted(files)))\n\n def remove_cache_items(self, options: Values, args: List[Any]) -> None:\n if len(args) > 1:\n raise CommandError("Too many arguments")\n\n if not args:\n raise CommandError("Please provide a pattern")\n\n files = self._find_wheels(options, args[0])\n\n no_matching_msg = "No matching packages"\n if args[0] == "*":\n # Only fetch http files if no specific pattern given\n files += self._find_http_files(options)\n else:\n # Add the pattern to the log message\n no_matching_msg += f' for pattern "{args[0]}"'\n\n if not files:\n logger.warning(no_matching_msg)\n\n bytes_removed = 0\n for filename in files:\n bytes_removed += os.stat(filename).st_size\n os.unlink(filename)\n logger.verbose("Removed %s", filename)\n logger.info("Files removed: %s (%s)", len(files), format_size(bytes_removed))\n\n def purge_cache(self, options: Values, args: List[Any]) -> None:\n if args:\n raise CommandError("Too many arguments")\n\n return self.remove_cache_items(options, ["*"])\n\n def _cache_dir(self, options: Values, subdir: str) -> str:\n return os.path.join(options.cache_dir, subdir)\n\n def _find_http_files(self, options: Values) -> List[str]:\n old_http_dir = self._cache_dir(options, "http")\n new_http_dir = self._cache_dir(options, "http-v2")\n return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(\n new_http_dir, "*"\n )\n\n def _find_wheels(self, options: Values, pattern: str) -> List[str]:\n wheel_dir = self._cache_dir(options, "wheels")\n\n # The 
wheel filename format, as specified in PEP 427, is:\n # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl\n #\n # Additionally, non-alphanumeric values in the distribution are\n # normalized to underscores (_), meaning hyphens can never occur\n # before `-{version}`.\n #\n # Given that information:\n # - If the pattern we're given contains a hyphen (-), the user is\n # providing at least the version. Thus, we can just append `*.whl`\n # to match the rest of it.\n # - If the pattern we're given doesn't contain a hyphen (-), the\n # user is only providing the name. Thus, we append `-*.whl` to\n # match the hyphen before the version, followed by anything else.\n #\n # PEP 427: https://www.python.org/dev/peps/pep-0427/\n pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")\n\n return filesystem.find_files(wheel_dir, pattern)\n
.venv\Lib\site-packages\pip\_internal\commands\cache.py
cache.py
Python
8,107
0.95
0.149123
0.107527
node-utils
136
2024-09-02T16:57:35.231688
GPL-3.0
false
113ccfb17a752fa425015a6e113078ea
import logging\nfrom optparse import Values\nfrom typing import List\n\nfrom pip._internal.cli.base_command import Command\nfrom pip._internal.cli.status_codes import ERROR, SUCCESS\nfrom pip._internal.metadata import get_default_environment\nfrom pip._internal.operations.check import (\n check_package_set,\n check_unsupported,\n create_package_set_from_installed,\n)\nfrom pip._internal.utils.compatibility_tags import get_supported\nfrom pip._internal.utils.misc import write_output\n\nlogger = logging.getLogger(__name__)\n\n\nclass CheckCommand(Command):\n """Verify installed packages have compatible dependencies."""\n\n ignore_require_venv = True\n usage = """\n %prog [options]"""\n\n def run(self, options: Values, args: List[str]) -> int:\n package_set, parsing_probs = create_package_set_from_installed()\n missing, conflicting = check_package_set(package_set)\n unsupported = list(\n check_unsupported(\n get_default_environment().iter_installed_distributions(),\n get_supported(),\n )\n )\n\n for project_name in missing:\n version = package_set[project_name].version\n for dependency in missing[project_name]:\n write_output(\n "%s %s requires %s, which is not installed.",\n project_name,\n version,\n dependency[0],\n )\n\n for project_name in conflicting:\n version = package_set[project_name].version\n for dep_name, dep_version, req in conflicting[project_name]:\n write_output(\n "%s %s has requirement %s, but you have %s %s.",\n project_name,\n version,\n req,\n dep_name,\n dep_version,\n )\n for package in unsupported:\n write_output(\n "%s %s is not supported on this platform",\n package.raw_name,\n package.version,\n )\n if missing or conflicting or parsing_probs or unsupported:\n return ERROR\n else:\n write_output("No broken requirements found.")\n return SUCCESS\n
.venv\Lib\site-packages\pip\_internal\commands\check.py
check.py
Python
2,268
0.85
0.119403
0
react-lib
310
2025-01-01T13:47:07.147568
BSD-3-Clause
false
e1725a81e100d704d1e19d4e54c3647e
import sys\nimport textwrap\nfrom optparse import Values\nfrom typing import List\n\nfrom pip._internal.cli.base_command import Command\nfrom pip._internal.cli.status_codes import SUCCESS\nfrom pip._internal.utils.misc import get_prog\n\nBASE_COMPLETION = """\n# pip {shell} completion start{script}# pip {shell} completion end\n"""\n\nCOMPLETION_SCRIPTS = {\n "bash": """\n _pip_completion()\n {{\n COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\\n COMP_CWORD=$COMP_CWORD \\\n PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )\n }}\n complete -o default -F _pip_completion {prog}\n """,\n "zsh": """\n #compdef -P pip[0-9.]#\n __pip() {{\n compadd $( COMP_WORDS="$words[*]" \\\n COMP_CWORD=$((CURRENT-1)) \\\n PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )\n }}\n if [[ $zsh_eval_context[-1] == loadautofunc ]]; then\n # autoload from fpath, call function directly\n __pip "$@"\n else\n # eval/source/. command, register function for later\n compdef __pip -P 'pip[0-9.]#'\n fi\n """,\n "fish": """\n function __fish_complete_pip\n set -lx COMP_WORDS \\\n (commandline --current-process --tokenize --cut-at-cursor) \\\n (commandline --current-token --cut-at-cursor)\n set -lx COMP_CWORD (math (count $COMP_WORDS) - 1)\n set -lx PIP_AUTO_COMPLETE 1\n set -l completions\n if string match -q '2.*' $version\n set completions (eval $COMP_WORDS[1])\n else\n set completions ($COMP_WORDS[1])\n end\n string split \\ -- $completions\n end\n complete -fa "(__fish_complete_pip)" -c {prog}\n """,\n "powershell": """\n if ((Test-Path Function:\\TabExpansion) -and -not `\n (Test-Path Function:\\_pip_completeBackup)) {{\n Rename-Item Function:\\TabExpansion _pip_completeBackup\n }}\n function TabExpansion($line, $lastWord) {{\n $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()\n if ($lastBlock.StartsWith("{prog} ")) {{\n $Env:COMP_WORDS=$lastBlock\n $Env:COMP_CWORD=$lastBlock.Split().Length - 1\n $Env:PIP_AUTO_COMPLETE=1\n (& {prog}).Split()\n Remove-Item Env:COMP_WORDS\n Remove-Item Env:COMP_CWORD\n 
Remove-Item Env:PIP_AUTO_COMPLETE\n }}\n elseif (Test-Path Function:\\_pip_completeBackup) {{\n # Fall back on existing tab expansion\n _pip_completeBackup $line $lastWord\n }}\n }}\n """,\n}\n\n\nclass CompletionCommand(Command):\n """A helper command to be used for command completion."""\n\n ignore_require_venv = True\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(\n "--bash",\n "-b",\n action="store_const",\n const="bash",\n dest="shell",\n help="Emit completion code for bash",\n )\n self.cmd_opts.add_option(\n "--zsh",\n "-z",\n action="store_const",\n const="zsh",\n dest="shell",\n help="Emit completion code for zsh",\n )\n self.cmd_opts.add_option(\n "--fish",\n "-f",\n action="store_const",\n const="fish",\n dest="shell",\n help="Emit completion code for fish",\n )\n self.cmd_opts.add_option(\n "--powershell",\n "-p",\n action="store_const",\n const="powershell",\n dest="shell",\n help="Emit completion code for powershell",\n )\n\n self.parser.insert_option_group(0, self.cmd_opts)\n\n def run(self, options: Values, args: List[str]) -> int:\n """Prints the completion code of the given shell"""\n shells = COMPLETION_SCRIPTS.keys()\n shell_options = ["--" + shell for shell in sorted(shells)]\n if options.shell in shells:\n script = textwrap.dedent(\n COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())\n )\n print(BASE_COMPLETION.format(script=script, shell=options.shell))\n return SUCCESS\n else:\n sys.stderr.write(\n "ERROR: You must pass {}\n".format(" or ".join(shell_options))\n )\n return SUCCESS\n
.venv\Lib\site-packages\pip\_internal\commands\completion.py
completion.py
Python
4,554
0.95
0.139706
0.03937
vue-tools
580
2024-02-23T03:24:50.749924
MIT
false
a1df2bbdc930a9b831babeb103b6751a
import logging\nimport os\nimport subprocess\nfrom optparse import Values\nfrom typing import Any, List, Optional\n\nfrom pip._internal.cli.base_command import Command\nfrom pip._internal.cli.status_codes import ERROR, SUCCESS\nfrom pip._internal.configuration import (\n Configuration,\n Kind,\n get_configuration_files,\n kinds,\n)\nfrom pip._internal.exceptions import PipError\nfrom pip._internal.utils.logging import indent_log\nfrom pip._internal.utils.misc import get_prog, write_output\n\nlogger = logging.getLogger(__name__)\n\n\nclass ConfigurationCommand(Command):\n """\n Manage local and global configuration.\n\n Subcommands:\n\n - list: List the active configuration (or from the file specified)\n - edit: Edit the configuration file in an editor\n - get: Get the value associated with command.option\n - set: Set the command.option=value\n - unset: Unset the value associated with command.option\n - debug: List the configuration files and values defined under them\n\n Configuration keys should be dot separated command and option name,\n with the special prefix "global" affecting any command. For example,\n "pip config set global.index-url https://example.org/" would configure\n the index url for all commands, but "pip config set download.timeout 10"\n would configure a 10 second timeout only for "pip download" commands.\n\n If none of --user, --global and --site are passed, a virtual\n environment configuration file is used if one is active and the file\n exists. 
Otherwise, all modifications happen to the user file by\n default.\n """\n\n ignore_require_venv = True\n usage = """\n %prog [<file-option>] list\n %prog [<file-option>] [--editor <editor-path>] edit\n\n %prog [<file-option>] get command.option\n %prog [<file-option>] set command.option value\n %prog [<file-option>] unset command.option\n %prog [<file-option>] debug\n """\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(\n "--editor",\n dest="editor",\n action="store",\n default=None,\n help=(\n "Editor to use to edit the file. Uses VISUAL or EDITOR "\n "environment variables if not provided."\n ),\n )\n\n self.cmd_opts.add_option(\n "--global",\n dest="global_file",\n action="store_true",\n default=False,\n help="Use the system-wide configuration file only",\n )\n\n self.cmd_opts.add_option(\n "--user",\n dest="user_file",\n action="store_true",\n default=False,\n help="Use the user configuration file only",\n )\n\n self.cmd_opts.add_option(\n "--site",\n dest="site_file",\n action="store_true",\n default=False,\n help="Use the current environment configuration file only",\n )\n\n self.parser.insert_option_group(0, self.cmd_opts)\n\n def run(self, options: Values, args: List[str]) -> int:\n handlers = {\n "list": self.list_values,\n "edit": self.open_in_editor,\n "get": self.get_name,\n "set": self.set_name_value,\n "unset": self.unset_name,\n "debug": self.list_config_values,\n }\n\n # Determine action\n if not args or args[0] not in handlers:\n logger.error(\n "Need an action (%s) to perform.",\n ", ".join(sorted(handlers)),\n )\n return ERROR\n\n action = args[0]\n\n # Determine which configuration files are to be loaded\n # Depends on whether the command is modifying.\n try:\n load_only = self._determine_file(\n options, need_value=(action in ["get", "set", "unset", "edit"])\n )\n except PipError as e:\n logger.error(e.args[0])\n return ERROR\n\n # Load a new configuration\n self.configuration = Configuration(\n isolated=options.isolated_mode, 
load_only=load_only\n )\n self.configuration.load()\n\n # Error handling happens here, not in the action-handlers.\n try:\n handlers[action](options, args[1:])\n except PipError as e:\n logger.error(e.args[0])\n return ERROR\n\n return SUCCESS\n\n def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:\n file_options = [\n key\n for key, value in (\n (kinds.USER, options.user_file),\n (kinds.GLOBAL, options.global_file),\n (kinds.SITE, options.site_file),\n )\n if value\n ]\n\n if not file_options:\n if not need_value:\n return None\n # Default to user, unless there's a site file.\n elif any(\n os.path.exists(site_config_file)\n for site_config_file in get_configuration_files()[kinds.SITE]\n ):\n return kinds.SITE\n else:\n return kinds.USER\n elif len(file_options) == 1:\n return file_options[0]\n\n raise PipError(\n "Need exactly one file to operate upon "\n "(--user, --site, --global) to perform."\n )\n\n def list_values(self, options: Values, args: List[str]) -> None:\n self._get_n_args(args, "list", n=0)\n\n for key, value in sorted(self.configuration.items()):\n write_output("%s=%r", key, value)\n\n def get_name(self, options: Values, args: List[str]) -> None:\n key = self._get_n_args(args, "get [name]", n=1)\n value = self.configuration.get_value(key)\n\n write_output("%s", value)\n\n def set_name_value(self, options: Values, args: List[str]) -> None:\n key, value = self._get_n_args(args, "set [name] [value]", n=2)\n self.configuration.set_value(key, value)\n\n self._save_configuration()\n\n def unset_name(self, options: Values, args: List[str]) -> None:\n key = self._get_n_args(args, "unset [name]", n=1)\n self.configuration.unset_value(key)\n\n self._save_configuration()\n\n def list_config_values(self, options: Values, args: List[str]) -> None:\n """List config key-value pairs across different config files"""\n self._get_n_args(args, "debug", n=0)\n\n self.print_env_var_values()\n # Iterate over config files and print if they 
exist, and the\n # key-value pairs present in them if they do\n for variant, files in sorted(self.configuration.iter_config_files()):\n write_output("%s:", variant)\n for fname in files:\n with indent_log():\n file_exists = os.path.exists(fname)\n write_output("%s, exists: %r", fname, file_exists)\n if file_exists:\n self.print_config_file_values(variant)\n\n def print_config_file_values(self, variant: Kind) -> None:\n """Get key-value pairs from the file of a variant"""\n for name, value in self.configuration.get_values_in_config(variant).items():\n with indent_log():\n write_output("%s: %s", name, value)\n\n def print_env_var_values(self) -> None:\n """Get key-values pairs present as environment variables"""\n write_output("%s:", "env_var")\n with indent_log():\n for key, value in sorted(self.configuration.get_environ_vars()):\n env_var = f"PIP_{key.upper()}"\n write_output("%s=%r", env_var, value)\n\n def open_in_editor(self, options: Values, args: List[str]) -> None:\n editor = self._determine_editor(options)\n\n fname = self.configuration.get_file_to_edit()\n if fname is None:\n raise PipError("Could not determine appropriate file.")\n elif '"' in fname:\n # This shouldn't happen, unless we see a username like that.\n # If that happens, we'd appreciate a pull request fixing this.\n raise PipError(\n f'Can not open an editor for a file name containing "\n{fname}'\n )\n\n try:\n subprocess.check_call(f'{editor} "{fname}"', shell=True)\n except FileNotFoundError as e:\n if not e.filename:\n e.filename = editor\n raise\n except subprocess.CalledProcessError as e:\n raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")\n\n def _get_n_args(self, args: List[str], example: str, n: int) -> Any:\n """Helper to make sure the command got the right number of arguments"""\n if len(args) != n:\n msg = (\n f"Got unexpected number of arguments, expected {n}. 
"\n f'(example: "{get_prog()} config {example}")'\n )\n raise PipError(msg)\n\n if n == 1:\n return args[0]\n else:\n return args\n\n def _save_configuration(self) -> None:\n # We successfully ran a modifying command. Need to save the\n # configuration.\n try:\n self.configuration.save()\n except Exception:\n logger.exception(\n "Unable to save configuration. Please report this as a bug."\n )\n raise PipError("Internal Error.")\n\n def _determine_editor(self, options: Values) -> str:\n if options.editor is not None:\n return options.editor\n elif "VISUAL" in os.environ:\n return os.environ["VISUAL"]\n elif "EDITOR" in os.environ:\n return os.environ["EDITOR"]\n else:\n raise PipError("Could not determine editor to use.")\n
.venv\Lib\site-packages\pip\_internal\commands\configuration.py
configuration.py
Python
9,766
0.95
0.153571
0.050847
react-lib
483
2025-05-14T15:33:38.795763
MIT
false
3694eb7c7165f7d0f192f343d4cb4b7d
import locale\nimport logging\nimport os\nimport sys\nfrom optparse import Values\nfrom types import ModuleType\nfrom typing import Any, Dict, List, Optional\n\nimport pip._vendor\nfrom pip._vendor.certifi import where\nfrom pip._vendor.packaging.version import parse as parse_version\n\nfrom pip._internal.cli import cmdoptions\nfrom pip._internal.cli.base_command import Command\nfrom pip._internal.cli.cmdoptions import make_target_python\nfrom pip._internal.cli.status_codes import SUCCESS\nfrom pip._internal.configuration import Configuration\nfrom pip._internal.metadata import get_environment\nfrom pip._internal.utils.compat import open_text_resource\nfrom pip._internal.utils.logging import indent_log\nfrom pip._internal.utils.misc import get_pip_version\n\nlogger = logging.getLogger(__name__)\n\n\ndef show_value(name: str, value: Any) -> None:\n logger.info("%s: %s", name, value)\n\n\ndef show_sys_implementation() -> None:\n logger.info("sys.implementation:")\n implementation_name = sys.implementation.name\n with indent_log():\n show_value("name", implementation_name)\n\n\ndef create_vendor_txt_map() -> Dict[str, str]:\n with open_text_resource("pip._vendor", "vendor.txt") as f:\n # Purge non version specifying lines.\n # Also, remove any space prefix or suffixes (including comments).\n lines = [\n line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line\n ]\n\n # Transform into "module" -> version dict.\n return dict(line.split("==", 1) for line in lines)\n\n\ndef get_module_from_module_name(module_name: str) -> Optional[ModuleType]:\n # Module name can be uppercase in vendor.txt for some reason...\n module_name = module_name.lower().replace("-", "_")\n # PATCH: setuptools is actually only pkg_resources.\n if module_name == "setuptools":\n module_name = "pkg_resources"\n\n try:\n __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)\n return getattr(pip._vendor, module_name)\n except ImportError:\n # We allow 'truststore' to fail 
to import due\n # to being unavailable on Python 3.9 and earlier.\n if module_name == "truststore" and sys.version_info < (3, 10):\n return None\n raise\n\n\ndef get_vendor_version_from_module(module_name: str) -> Optional[str]:\n module = get_module_from_module_name(module_name)\n version = getattr(module, "__version__", None)\n\n if module and not version:\n # Try to find version in debundled module info.\n assert module.__file__ is not None\n env = get_environment([os.path.dirname(module.__file__)])\n dist = env.get_distribution(module_name)\n if dist:\n version = str(dist.version)\n\n return version\n\n\ndef show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:\n """Log the actual version and print extra info if there is\n a conflict or if the actual version could not be imported.\n """\n for module_name, expected_version in vendor_txt_versions.items():\n extra_message = ""\n actual_version = get_vendor_version_from_module(module_name)\n if not actual_version:\n extra_message = (\n " (Unable to locate actual module version, using"\n " vendor.txt specified version)"\n )\n actual_version = expected_version\n elif parse_version(actual_version) != parse_version(expected_version):\n extra_message = (\n " (CONFLICT: vendor.txt suggests version should"\n f" be {expected_version})"\n )\n logger.info("%s==%s%s", module_name, actual_version, extra_message)\n\n\ndef show_vendor_versions() -> None:\n logger.info("vendored library versions:")\n\n vendor_txt_versions = create_vendor_txt_map()\n with indent_log():\n show_actual_vendor_versions(vendor_txt_versions)\n\n\ndef show_tags(options: Values) -> None:\n tag_limit = 10\n\n target_python = make_target_python(options)\n tags = target_python.get_sorted_tags()\n\n # Display the target options that were explicitly provided.\n formatted_target = target_python.format_given()\n suffix = ""\n if formatted_target:\n suffix = f" (target: {formatted_target})"\n\n msg = f"Compatible tags: {len(tags)}{suffix}"\n 
logger.info(msg)\n\n if options.verbose < 1 and len(tags) > tag_limit:\n tags_limited = True\n tags = tags[:tag_limit]\n else:\n tags_limited = False\n\n with indent_log():\n for tag in tags:\n logger.info(str(tag))\n\n if tags_limited:\n msg = f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"\n logger.info(msg)\n\n\ndef ca_bundle_info(config: Configuration) -> str:\n levels = {key.split(".", 1)[0] for key, _ in config.items()}\n if not levels:\n return "Not specified"\n\n levels_that_override_global = ["install", "wheel", "download"]\n global_overriding_level = [\n level for level in levels if level in levels_that_override_global\n ]\n if not global_overriding_level:\n return "global"\n\n if "global" in levels:\n levels.remove("global")\n return ", ".join(levels)\n\n\nclass DebugCommand(Command):\n """\n Display debug information.\n """\n\n usage = """\n %prog <options>"""\n ignore_require_venv = True\n\n def add_options(self) -> None:\n cmdoptions.add_target_python_options(self.cmd_opts)\n self.parser.insert_option_group(0, self.cmd_opts)\n self.parser.config.load()\n\n def run(self, options: Values, args: List[str]) -> int:\n logger.warning(\n "This command is only meant for debugging. 
"\n "Do not use this with automation for parsing and getting these "\n "details, since the output and options of this command may "\n "change without notice."\n )\n show_value("pip version", get_pip_version())\n show_value("sys.version", sys.version)\n show_value("sys.executable", sys.executable)\n show_value("sys.getdefaultencoding", sys.getdefaultencoding())\n show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())\n show_value(\n "locale.getpreferredencoding",\n locale.getpreferredencoding(),\n )\n show_value("sys.platform", sys.platform)\n show_sys_implementation()\n\n show_value("'cert' config value", ca_bundle_info(self.parser.config))\n show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))\n show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))\n show_value("pip._vendor.certifi.where()", where())\n show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)\n\n show_vendor_versions()\n\n show_tags(options)\n\n return SUCCESS\n
.venv\Lib\site-packages\pip\_internal\commands\debug.py
debug.py
Python
6,797
0.95
0.18408
0.056962
vue-tools
657
2024-06-04T14:28:47.915235
MIT
false
c193ec946dfa684faaa807b4d9644562
import logging\nimport os\nfrom optparse import Values\nfrom typing import List\n\nfrom pip._internal.cli import cmdoptions\nfrom pip._internal.cli.cmdoptions import make_target_python\nfrom pip._internal.cli.req_command import RequirementCommand, with_cleanup\nfrom pip._internal.cli.status_codes import SUCCESS\nfrom pip._internal.operations.build.build_tracker import get_build_tracker\nfrom pip._internal.req.req_install import check_legacy_setup_py_options\nfrom pip._internal.utils.misc import ensure_dir, normalize_path, write_output\nfrom pip._internal.utils.temp_dir import TempDirectory\n\nlogger = logging.getLogger(__name__)\n\n\nclass DownloadCommand(RequirementCommand):\n """\n Download packages from:\n\n - PyPI (and other indexes) using requirement specifiers.\n - VCS project urls.\n - Local project directories.\n - Local or remote source archives.\n\n pip also supports downloading from "requirements files", which provide\n an easy way to specify a whole environment to be downloaded.\n """\n\n usage = """\n %prog [options] <requirement specifier> [package-index-options] ...\n %prog [options] -r <requirements file> [package-index-options] ...\n %prog [options] <vcs project url> ...\n %prog [options] <local project path> ...\n %prog [options] <archive url/path> ..."""\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(cmdoptions.constraints())\n self.cmd_opts.add_option(cmdoptions.requirements())\n self.cmd_opts.add_option(cmdoptions.no_deps())\n self.cmd_opts.add_option(cmdoptions.global_options())\n self.cmd_opts.add_option(cmdoptions.no_binary())\n self.cmd_opts.add_option(cmdoptions.only_binary())\n self.cmd_opts.add_option(cmdoptions.prefer_binary())\n self.cmd_opts.add_option(cmdoptions.src())\n self.cmd_opts.add_option(cmdoptions.pre())\n self.cmd_opts.add_option(cmdoptions.require_hashes())\n self.cmd_opts.add_option(cmdoptions.progress_bar())\n self.cmd_opts.add_option(cmdoptions.no_build_isolation())\n 
self.cmd_opts.add_option(cmdoptions.use_pep517())\n self.cmd_opts.add_option(cmdoptions.no_use_pep517())\n self.cmd_opts.add_option(cmdoptions.check_build_deps())\n self.cmd_opts.add_option(cmdoptions.ignore_requires_python())\n\n self.cmd_opts.add_option(\n "-d",\n "--dest",\n "--destination-dir",\n "--destination-directory",\n dest="download_dir",\n metavar="dir",\n default=os.curdir,\n help="Download packages into <dir>.",\n )\n\n cmdoptions.add_target_python_options(self.cmd_opts)\n\n index_opts = cmdoptions.make_option_group(\n cmdoptions.index_group,\n self.parser,\n )\n\n self.parser.insert_option_group(0, index_opts)\n self.parser.insert_option_group(0, self.cmd_opts)\n\n @with_cleanup\n def run(self, options: Values, args: List[str]) -> int:\n options.ignore_installed = True\n # editable doesn't really make sense for `pip download`, but the bowels\n # of the RequirementSet code require that property.\n options.editables = []\n\n cmdoptions.check_dist_restriction(options)\n\n options.download_dir = normalize_path(options.download_dir)\n ensure_dir(options.download_dir)\n\n session = self.get_default_session(options)\n\n target_python = make_target_python(options)\n finder = self._build_package_finder(\n options=options,\n session=session,\n target_python=target_python,\n ignore_requires_python=options.ignore_requires_python,\n )\n\n build_tracker = self.enter_context(get_build_tracker())\n\n directory = TempDirectory(\n delete=not options.no_clean,\n kind="download",\n globally_managed=True,\n )\n\n reqs = self.get_requirements(args, options, finder, session)\n check_legacy_setup_py_options(options, reqs)\n\n preparer = self.make_requirement_preparer(\n temp_build_dir=directory,\n options=options,\n build_tracker=build_tracker,\n session=session,\n finder=finder,\n download_dir=options.download_dir,\n use_user_site=False,\n verbosity=self.verbosity,\n )\n\n resolver = self.make_resolver(\n preparer=preparer,\n finder=finder,\n options=options,\n 
ignore_requires_python=options.ignore_requires_python,\n use_pep517=options.use_pep517,\n py_version_info=options.python_version,\n )\n\n self.trace_basic_info(finder)\n\n requirement_set = resolver.resolve(reqs, check_supported_wheels=True)\n\n downloaded: List[str] = []\n for req in requirement_set.requirements.values():\n if req.satisfied_by is None:\n assert req.name is not None\n preparer.save_linked_requirement(req)\n downloaded.append(req.name)\n\n preparer.prepare_linked_requirements_more(requirement_set.requirements.values())\n\n if downloaded:\n write_output("Successfully downloaded %s", " ".join(downloaded))\n\n return SUCCESS\n
.venv\Lib\site-packages\pip\_internal\commands\download.py
download.py
Python
5,273
0.95
0.047945
0.016949
node-utils
8
2024-11-23T17:00:03.544786
GPL-3.0
false
dc2d239d493860f2365cb59ffbceee67
import sys\nfrom optparse import Values\nfrom typing import AbstractSet, List\n\nfrom pip._internal.cli import cmdoptions\nfrom pip._internal.cli.base_command import Command\nfrom pip._internal.cli.status_codes import SUCCESS\nfrom pip._internal.operations.freeze import freeze\nfrom pip._internal.utils.compat import stdlib_pkgs\n\n\ndef _should_suppress_build_backends() -> bool:\n return sys.version_info < (3, 12)\n\n\ndef _dev_pkgs() -> AbstractSet[str]:\n pkgs = {"pip"}\n\n if _should_suppress_build_backends():\n pkgs |= {"setuptools", "distribute", "wheel"}\n\n return pkgs\n\n\nclass FreezeCommand(Command):\n """\n Output installed packages in requirements format.\n\n packages are listed in a case-insensitive sorted order.\n """\n\n ignore_require_venv = True\n usage = """\n %prog [options]"""\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(\n "-r",\n "--requirement",\n dest="requirements",\n action="append",\n default=[],\n metavar="file",\n help=(\n "Use the order in the given requirements file and its "\n "comments when generating output. 
This option can be "\n "used multiple times."\n ),\n )\n self.cmd_opts.add_option(\n "-l",\n "--local",\n dest="local",\n action="store_true",\n default=False,\n help=(\n "If in a virtualenv that has global access, do not output "\n "globally-installed packages."\n ),\n )\n self.cmd_opts.add_option(\n "--user",\n dest="user",\n action="store_true",\n default=False,\n help="Only output packages installed in user-site.",\n )\n self.cmd_opts.add_option(cmdoptions.list_path())\n self.cmd_opts.add_option(\n "--all",\n dest="freeze_all",\n action="store_true",\n help=(\n "Do not skip these packages in the output:"\n " {}".format(", ".join(_dev_pkgs()))\n ),\n )\n self.cmd_opts.add_option(\n "--exclude-editable",\n dest="exclude_editable",\n action="store_true",\n help="Exclude editable package from output.",\n )\n self.cmd_opts.add_option(cmdoptions.list_exclude())\n\n self.parser.insert_option_group(0, self.cmd_opts)\n\n def run(self, options: Values, args: List[str]) -> int:\n skip = set(stdlib_pkgs)\n if not options.freeze_all:\n skip.update(_dev_pkgs())\n\n if options.excludes:\n skip.update(options.excludes)\n\n cmdoptions.check_list_path_option(options)\n\n for line in freeze(\n requirement=options.requirements,\n local_only=options.local,\n user_only=options.user,\n paths=options.path,\n isolated=options.isolated_mode,\n skip=skip,\n exclude_editable=options.exclude_editable,\n ):\n sys.stdout.write(line + "\n")\n return SUCCESS\n
.venv\Lib\site-packages\pip\_internal\commands\freeze.py
freeze.py
Python
3,144
0.85
0.083333
0
python-kit
915
2023-08-24T21:08:36.497557
GPL-3.0
false
f99ccc3962a67a6563d1f2a4da9da188
import hashlib\nimport logging\nimport sys\nfrom optparse import Values\nfrom typing import List\n\nfrom pip._internal.cli.base_command import Command\nfrom pip._internal.cli.status_codes import ERROR, SUCCESS\nfrom pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES\nfrom pip._internal.utils.misc import read_chunks, write_output\n\nlogger = logging.getLogger(__name__)\n\n\nclass HashCommand(Command):\n """\n Compute a hash of a local package archive.\n\n These can be used with --hash in a requirements file to do repeatable\n installs.\n """\n\n usage = "%prog [options] <file> ..."\n ignore_require_venv = True\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(\n "-a",\n "--algorithm",\n dest="algorithm",\n choices=STRONG_HASHES,\n action="store",\n default=FAVORITE_HASH,\n help="The hash algorithm to use: one of {}".format(\n ", ".join(STRONG_HASHES)\n ),\n )\n self.parser.insert_option_group(0, self.cmd_opts)\n\n def run(self, options: Values, args: List[str]) -> int:\n if not args:\n self.parser.print_usage(sys.stderr)\n return ERROR\n\n algorithm = options.algorithm\n for path in args:\n write_output(\n "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)\n )\n return SUCCESS\n\n\ndef _hash_of_file(path: str, algorithm: str) -> str:\n """Return the hash digest of a file."""\n with open(path, "rb") as archive:\n hash = hashlib.new(algorithm)\n for chunk in read_chunks(archive):\n hash.update(chunk)\n return hash.hexdigest()\n
.venv\Lib\site-packages\pip\_internal\commands\hash.py
hash.py
Python
1,703
0.85
0.118644
0
awesome-app
896
2025-03-10T03:42:09.818272
MIT
false
0c3c6e30957a74e73c693e1069492566
from optparse import Values\nfrom typing import List\n\nfrom pip._internal.cli.base_command import Command\nfrom pip._internal.cli.status_codes import SUCCESS\nfrom pip._internal.exceptions import CommandError\n\n\nclass HelpCommand(Command):\n """Show help for commands"""\n\n usage = """\n %prog <command>"""\n ignore_require_venv = True\n\n def run(self, options: Values, args: List[str]) -> int:\n from pip._internal.commands import (\n commands_dict,\n create_command,\n get_similar_commands,\n )\n\n try:\n # 'pip help' with no args is handled by pip.__init__.parseopt()\n cmd_name = args[0] # the command we need help for\n except IndexError:\n return SUCCESS\n\n if cmd_name not in commands_dict:\n guess = get_similar_commands(cmd_name)\n\n msg = [f'unknown command "{cmd_name}"']\n if guess:\n msg.append(f'maybe you meant "{guess}"')\n\n raise CommandError(" - ".join(msg))\n\n command = create_command(cmd_name)\n command.parser.print_help()\n\n return SUCCESS\n
.venv\Lib\site-packages\pip\_internal\commands\help.py
help.py
Python
1,132
0.95
0.170732
0.033333
awesome-app
105
2024-08-01T16:38:12.767328
BSD-3-Clause
false
c2be5ef0ef3bd2f4791cf800e12e25a6
import json\nimport logging\nfrom optparse import Values\nfrom typing import Any, Iterable, List, Optional\n\nfrom pip._vendor.packaging.version import Version\n\nfrom pip._internal.cli import cmdoptions\nfrom pip._internal.cli.req_command import IndexGroupCommand\nfrom pip._internal.cli.status_codes import ERROR, SUCCESS\nfrom pip._internal.commands.search import (\n get_installed_distribution,\n print_dist_installation_info,\n)\nfrom pip._internal.exceptions import CommandError, DistributionNotFound, PipError\nfrom pip._internal.index.collector import LinkCollector\nfrom pip._internal.index.package_finder import PackageFinder\nfrom pip._internal.models.selection_prefs import SelectionPreferences\nfrom pip._internal.models.target_python import TargetPython\nfrom pip._internal.network.session import PipSession\nfrom pip._internal.utils.misc import write_output\n\nlogger = logging.getLogger(__name__)\n\n\nclass IndexCommand(IndexGroupCommand):\n """\n Inspect information available from package indexes.\n """\n\n ignore_require_venv = True\n usage = """\n %prog versions <package>\n """\n\n def add_options(self) -> None:\n cmdoptions.add_target_python_options(self.cmd_opts)\n\n self.cmd_opts.add_option(cmdoptions.ignore_requires_python())\n self.cmd_opts.add_option(cmdoptions.pre())\n self.cmd_opts.add_option(cmdoptions.json())\n self.cmd_opts.add_option(cmdoptions.no_binary())\n self.cmd_opts.add_option(cmdoptions.only_binary())\n\n index_opts = cmdoptions.make_option_group(\n cmdoptions.index_group,\n self.parser,\n )\n\n self.parser.insert_option_group(0, index_opts)\n self.parser.insert_option_group(0, self.cmd_opts)\n\n def run(self, options: Values, args: List[str]) -> int:\n handlers = {\n "versions": self.get_available_package_versions,\n }\n\n # Determine action\n if not args or args[0] not in handlers:\n logger.error(\n "Need an action (%s) to perform.",\n ", ".join(sorted(handlers)),\n )\n return ERROR\n\n action = args[0]\n\n # Error handling happens here, 
not in the action-handlers.\n try:\n handlers[action](options, args[1:])\n except PipError as e:\n logger.error(e.args[0])\n return ERROR\n\n return SUCCESS\n\n def _build_package_finder(\n self,\n options: Values,\n session: PipSession,\n target_python: Optional[TargetPython] = None,\n ignore_requires_python: Optional[bool] = None,\n ) -> PackageFinder:\n """\n Create a package finder appropriate to the index command.\n """\n link_collector = LinkCollector.create(session, options=options)\n\n # Pass allow_yanked=False to ignore yanked versions.\n selection_prefs = SelectionPreferences(\n allow_yanked=False,\n allow_all_prereleases=options.pre,\n ignore_requires_python=ignore_requires_python,\n )\n\n return PackageFinder.create(\n link_collector=link_collector,\n selection_prefs=selection_prefs,\n target_python=target_python,\n )\n\n def get_available_package_versions(self, options: Values, args: List[Any]) -> None:\n if len(args) != 1:\n raise CommandError("You need to specify exactly one argument")\n\n target_python = cmdoptions.make_target_python(options)\n query = args[0]\n\n with self._build_session(options) as session:\n finder = self._build_package_finder(\n options=options,\n session=session,\n target_python=target_python,\n ignore_requires_python=options.ignore_requires_python,\n )\n\n versions: Iterable[Version] = (\n candidate.version for candidate in finder.find_all_candidates(query)\n )\n\n if not options.pre:\n # Remove prereleases\n versions = (\n version for version in versions if not version.is_prerelease\n )\n versions = set(versions)\n\n if not versions:\n raise DistributionNotFound(\n f"No matching distribution found for {query}"\n )\n\n formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]\n latest = formatted_versions[0]\n\n dist = get_installed_distribution(query)\n\n if options.json:\n structured_output = {\n "name": query,\n "versions": formatted_versions,\n "latest": latest,\n }\n\n if dist is not None:\n 
structured_output["installed_version"] = str(dist.version)\n\n write_output(json.dumps(structured_output))\n\n else:\n write_output(f"{query} ({latest})")\n write_output("Available versions: {}".format(", ".join(formatted_versions)))\n print_dist_installation_info(latest, dist)\n
.venv\Lib\site-packages\pip\_internal\commands\index.py
index.py
Python
5,068
0.95
0.111111
0.03252
node-utils
214
2025-01-08T16:12:48.842253
MIT
false
5e960e52f792008dc4ef725fe56686e4
import logging\nfrom optparse import Values\nfrom typing import Any, Dict, List\n\nfrom pip._vendor.packaging.markers import default_environment\nfrom pip._vendor.rich import print_json\n\nfrom pip import __version__\nfrom pip._internal.cli import cmdoptions\nfrom pip._internal.cli.base_command import Command\nfrom pip._internal.cli.status_codes import SUCCESS\nfrom pip._internal.metadata import BaseDistribution, get_environment\nfrom pip._internal.utils.compat import stdlib_pkgs\nfrom pip._internal.utils.urls import path_to_url\n\nlogger = logging.getLogger(__name__)\n\n\nclass InspectCommand(Command):\n """\n Inspect the content of a Python environment and produce a report in JSON format.\n """\n\n ignore_require_venv = True\n usage = """\n %prog [options]"""\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(\n "--local",\n action="store_true",\n default=False,\n help=(\n "If in a virtualenv that has global access, do not list "\n "globally-installed packages."\n ),\n )\n self.cmd_opts.add_option(\n "--user",\n dest="user",\n action="store_true",\n default=False,\n help="Only output packages installed in user-site.",\n )\n self.cmd_opts.add_option(cmdoptions.list_path())\n self.parser.insert_option_group(0, self.cmd_opts)\n\n def run(self, options: Values, args: List[str]) -> int:\n cmdoptions.check_list_path_option(options)\n dists = get_environment(options.path).iter_installed_distributions(\n local_only=options.local,\n user_only=options.user,\n skip=set(stdlib_pkgs),\n )\n output = {\n "version": "1",\n "pip_version": __version__,\n "installed": [self._dist_to_dict(dist) for dist in dists],\n "environment": default_environment(),\n # TODO tags? scheme?\n }\n print_json(data=output)\n return SUCCESS\n\n def _dist_to_dict(self, dist: BaseDistribution) -> Dict[str, Any]:\n res: Dict[str, Any] = {\n "metadata": dist.metadata_dict,\n "metadata_location": dist.info_location,\n }\n # direct_url. 
Note that we don't have download_info (as in the installation\n # report) since it is not recorded in installed metadata.\n direct_url = dist.direct_url\n if direct_url is not None:\n res["direct_url"] = direct_url.to_dict()\n else:\n # Emulate direct_url for legacy editable installs.\n editable_project_location = dist.editable_project_location\n if editable_project_location is not None:\n res["direct_url"] = {\n "url": path_to_url(editable_project_location),\n "dir_info": {\n "editable": True,\n },\n }\n # installer\n installer = dist.installer\n if dist.installer:\n res["installer"] = installer\n # requested\n if dist.installed_with_dist_info:\n res["requested"] = dist.requested\n return res\n
.venv\Lib\site-packages\pip\_internal\commands\inspect.py
inspect.py
Python
3,189
0.95
0.108696
0.072289
node-utils
736
2023-11-03T03:28:20.676142
GPL-3.0
false
b67760babceb3bc5def685c131a89217
import errno\nimport json\nimport operator\nimport os\nimport shutil\nimport site\nfrom optparse import SUPPRESS_HELP, Values\nfrom typing import List, Optional\n\nfrom pip._vendor.packaging.utils import canonicalize_name\nfrom pip._vendor.requests.exceptions import InvalidProxyURL\nfrom pip._vendor.rich import print_json\n\n# Eagerly import self_outdated_check to avoid crashes. Otherwise,\n# this module would be imported *after* pip was replaced, resulting\n# in crashes if the new self_outdated_check module was incompatible\n# with the rest of pip that's already imported, or allowing a\n# wheel to execute arbitrary code on install by replacing\n# self_outdated_check.\nimport pip._internal.self_outdated_check # noqa: F401\nfrom pip._internal.cache import WheelCache\nfrom pip._internal.cli import cmdoptions\nfrom pip._internal.cli.cmdoptions import make_target_python\nfrom pip._internal.cli.req_command import (\n RequirementCommand,\n with_cleanup,\n)\nfrom pip._internal.cli.status_codes import ERROR, SUCCESS\nfrom pip._internal.exceptions import CommandError, InstallationError\nfrom pip._internal.locations import get_scheme\nfrom pip._internal.metadata import get_environment\nfrom pip._internal.models.installation_report import InstallationReport\nfrom pip._internal.operations.build.build_tracker import get_build_tracker\nfrom pip._internal.operations.check import ConflictDetails, check_install_conflicts\nfrom pip._internal.req import install_given_reqs\nfrom pip._internal.req.req_install import (\n InstallRequirement,\n check_legacy_setup_py_options,\n)\nfrom pip._internal.utils.compat import WINDOWS\nfrom pip._internal.utils.filesystem import test_writable_dir\nfrom pip._internal.utils.logging import getLogger\nfrom pip._internal.utils.misc import (\n check_externally_managed,\n ensure_dir,\n get_pip_version,\n protect_pip_from_modification_on_windows,\n warn_if_run_as_root,\n write_output,\n)\nfrom pip._internal.utils.temp_dir import TempDirectory\nfrom 
pip._internal.utils.virtualenv import (\n running_under_virtualenv,\n virtualenv_no_global,\n)\nfrom pip._internal.wheel_builder import build, should_build_for_install_command\n\nlogger = getLogger(__name__)\n\n\nclass InstallCommand(RequirementCommand):\n """\n Install packages from:\n\n - PyPI (and other indexes) using requirement specifiers.\n - VCS project urls.\n - Local project directories.\n - Local or remote source archives.\n\n pip also supports installing from "requirements files", which provide\n an easy way to specify a whole environment to be installed.\n """\n\n usage = """\n %prog [options] <requirement specifier> [package-index-options] ...\n %prog [options] -r <requirements file> [package-index-options] ...\n %prog [options] [-e] <vcs project url> ...\n %prog [options] [-e] <local project path> ...\n %prog [options] <archive url/path> ..."""\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(cmdoptions.requirements())\n self.cmd_opts.add_option(cmdoptions.constraints())\n self.cmd_opts.add_option(cmdoptions.no_deps())\n self.cmd_opts.add_option(cmdoptions.pre())\n\n self.cmd_opts.add_option(cmdoptions.editable())\n self.cmd_opts.add_option(\n "--dry-run",\n action="store_true",\n dest="dry_run",\n default=False,\n help=(\n "Don't actually install anything, just print what would be. "\n "Can be used in combination with --ignore-installed "\n "to 'resolve' the requirements."\n ),\n )\n self.cmd_opts.add_option(\n "-t",\n "--target",\n dest="target_dir",\n metavar="dir",\n default=None,\n help=(\n "Install packages into <dir>. "\n "By default this will not replace existing files/folders in "\n "<dir>. Use --upgrade to replace existing packages in <dir> "\n "with new versions."\n ),\n )\n cmdoptions.add_target_python_options(self.cmd_opts)\n\n self.cmd_opts.add_option(\n "--user",\n dest="use_user_site",\n action="store_true",\n help=(\n "Install to the Python user install directory for your "\n "platform. 
Typically ~/.local/, or %APPDATA%\\Python on "\n "Windows. (See the Python documentation for site.USER_BASE "\n "for full details.)"\n ),\n )\n self.cmd_opts.add_option(\n "--no-user",\n dest="use_user_site",\n action="store_false",\n help=SUPPRESS_HELP,\n )\n self.cmd_opts.add_option(\n "--root",\n dest="root_path",\n metavar="dir",\n default=None,\n help="Install everything relative to this alternate root directory.",\n )\n self.cmd_opts.add_option(\n "--prefix",\n dest="prefix_path",\n metavar="dir",\n default=None,\n help=(\n "Installation prefix where lib, bin and other top-level "\n "folders are placed. Note that the resulting installation may "\n "contain scripts and other resources which reference the "\n "Python interpreter of pip, and not that of ``--prefix``. "\n "See also the ``--python`` option if the intention is to "\n "install packages into another (possibly pip-free) "\n "environment."\n ),\n )\n\n self.cmd_opts.add_option(cmdoptions.src())\n\n self.cmd_opts.add_option(\n "-U",\n "--upgrade",\n dest="upgrade",\n action="store_true",\n help=(\n "Upgrade all specified packages to the newest available "\n "version. The handling of dependencies depends on the "\n "upgrade-strategy used."\n ),\n )\n\n self.cmd_opts.add_option(\n "--upgrade-strategy",\n dest="upgrade_strategy",\n default="only-if-needed",\n choices=["only-if-needed", "eager"],\n help=(\n "Determines how dependency upgrading should be handled "\n "[default: %default]. "\n '"eager" - dependencies are upgraded regardless of '\n "whether the currently installed version satisfies the "\n "requirements of the upgraded package(s). 
"\n '"only-if-needed" - are upgraded only when they do not '\n "satisfy the requirements of the upgraded package(s)."\n ),\n )\n\n self.cmd_opts.add_option(\n "--force-reinstall",\n dest="force_reinstall",\n action="store_true",\n help="Reinstall all packages even if they are already up-to-date.",\n )\n\n self.cmd_opts.add_option(\n "-I",\n "--ignore-installed",\n dest="ignore_installed",\n action="store_true",\n help=(\n "Ignore the installed packages, overwriting them. "\n "This can break your system if the existing package "\n "is of a different version or was installed "\n "with a different package manager!"\n ),\n )\n\n self.cmd_opts.add_option(cmdoptions.ignore_requires_python())\n self.cmd_opts.add_option(cmdoptions.no_build_isolation())\n self.cmd_opts.add_option(cmdoptions.use_pep517())\n self.cmd_opts.add_option(cmdoptions.no_use_pep517())\n self.cmd_opts.add_option(cmdoptions.check_build_deps())\n self.cmd_opts.add_option(cmdoptions.override_externally_managed())\n\n self.cmd_opts.add_option(cmdoptions.config_settings())\n self.cmd_opts.add_option(cmdoptions.global_options())\n\n self.cmd_opts.add_option(\n "--compile",\n action="store_true",\n dest="compile",\n default=True,\n help="Compile Python source files to bytecode",\n )\n\n self.cmd_opts.add_option(\n "--no-compile",\n action="store_false",\n dest="compile",\n help="Do not compile Python source files to bytecode",\n )\n\n self.cmd_opts.add_option(\n "--no-warn-script-location",\n action="store_false",\n dest="warn_script_location",\n default=True,\n help="Do not warn when installing scripts outside PATH",\n )\n self.cmd_opts.add_option(\n "--no-warn-conflicts",\n action="store_false",\n dest="warn_about_conflicts",\n default=True,\n help="Do not warn about broken dependencies",\n )\n self.cmd_opts.add_option(cmdoptions.no_binary())\n self.cmd_opts.add_option(cmdoptions.only_binary())\n self.cmd_opts.add_option(cmdoptions.prefer_binary())\n self.cmd_opts.add_option(cmdoptions.require_hashes())\n 
self.cmd_opts.add_option(cmdoptions.progress_bar())\n self.cmd_opts.add_option(cmdoptions.root_user_action())\n\n index_opts = cmdoptions.make_option_group(\n cmdoptions.index_group,\n self.parser,\n )\n\n self.parser.insert_option_group(0, index_opts)\n self.parser.insert_option_group(0, self.cmd_opts)\n\n self.cmd_opts.add_option(\n "--report",\n dest="json_report_file",\n metavar="file",\n default=None,\n help=(\n "Generate a JSON file describing what pip did to install "\n "the provided requirements. "\n "Can be used in combination with --dry-run and --ignore-installed "\n "to 'resolve' the requirements. "\n "When - is used as file name it writes to stdout. "\n "When writing to stdout, please combine with the --quiet option "\n "to avoid mixing pip logging output with JSON output."\n ),\n )\n\n @with_cleanup\n def run(self, options: Values, args: List[str]) -> int:\n if options.use_user_site and options.target_dir is not None:\n raise CommandError("Can not combine '--user' and '--target'")\n\n # Check whether the environment we're installing into is externally\n # managed, as specified in PEP 668. Specifying --root, --target, or\n # --prefix disables the check, since there's no reliable way to locate\n # the EXTERNALLY-MANAGED file for those cases. 
An exception is also\n # made specifically for "--dry-run --report" for convenience.\n installing_into_current_environment = (\n not (options.dry_run and options.json_report_file)\n and options.root_path is None\n and options.target_dir is None\n and options.prefix_path is None\n )\n if (\n installing_into_current_environment\n and not options.override_externally_managed\n ):\n check_externally_managed()\n\n upgrade_strategy = "to-satisfy-only"\n if options.upgrade:\n upgrade_strategy = options.upgrade_strategy\n\n cmdoptions.check_dist_restriction(options, check_target=True)\n\n logger.verbose("Using %s", get_pip_version())\n options.use_user_site = decide_user_install(\n options.use_user_site,\n prefix_path=options.prefix_path,\n target_dir=options.target_dir,\n root_path=options.root_path,\n isolated_mode=options.isolated_mode,\n )\n\n target_temp_dir: Optional[TempDirectory] = None\n target_temp_dir_path: Optional[str] = None\n if options.target_dir:\n options.ignore_installed = True\n options.target_dir = os.path.abspath(options.target_dir)\n if (\n # fmt: off\n os.path.exists(options.target_dir) and\n not os.path.isdir(options.target_dir)\n # fmt: on\n ):\n raise CommandError(\n "Target path exists but is not a directory, will not continue."\n )\n\n # Create a target directory for using with the target option\n target_temp_dir = TempDirectory(kind="target")\n target_temp_dir_path = target_temp_dir.path\n self.enter_context(target_temp_dir)\n\n global_options = options.global_options or []\n\n session = self.get_default_session(options)\n\n target_python = make_target_python(options)\n finder = self._build_package_finder(\n options=options,\n session=session,\n target_python=target_python,\n ignore_requires_python=options.ignore_requires_python,\n )\n build_tracker = self.enter_context(get_build_tracker())\n\n directory = TempDirectory(\n delete=not options.no_clean,\n kind="install",\n globally_managed=True,\n )\n\n try:\n reqs = self.get_requirements(args, 
options, finder, session)\n check_legacy_setup_py_options(options, reqs)\n\n wheel_cache = WheelCache(options.cache_dir)\n\n # Only when installing is it permitted to use PEP 660.\n # In other circumstances (pip wheel, pip download) we generate\n # regular (i.e. non editable) metadata and wheels.\n for req in reqs:\n req.permit_editable_wheels = True\n\n preparer = self.make_requirement_preparer(\n temp_build_dir=directory,\n options=options,\n build_tracker=build_tracker,\n session=session,\n finder=finder,\n use_user_site=options.use_user_site,\n verbosity=self.verbosity,\n )\n resolver = self.make_resolver(\n preparer=preparer,\n finder=finder,\n options=options,\n wheel_cache=wheel_cache,\n use_user_site=options.use_user_site,\n ignore_installed=options.ignore_installed,\n ignore_requires_python=options.ignore_requires_python,\n force_reinstall=options.force_reinstall,\n upgrade_strategy=upgrade_strategy,\n use_pep517=options.use_pep517,\n py_version_info=options.python_version,\n )\n\n self.trace_basic_info(finder)\n\n requirement_set = resolver.resolve(\n reqs, check_supported_wheels=not options.target_dir\n )\n\n if options.json_report_file:\n report = InstallationReport(requirement_set.requirements_to_install)\n if options.json_report_file == "-":\n print_json(data=report.to_dict())\n else:\n with open(options.json_report_file, "w", encoding="utf-8") as f:\n json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)\n\n if options.dry_run:\n would_install_items = sorted(\n (r.metadata["name"], r.metadata["version"])\n for r in requirement_set.requirements_to_install\n )\n if would_install_items:\n write_output(\n "Would install %s",\n " ".join("-".join(item) for item in would_install_items),\n )\n return SUCCESS\n\n try:\n pip_req = requirement_set.get_requirement("pip")\n except KeyError:\n modifying_pip = False\n else:\n # If we're not replacing an already installed pip,\n # we're not modifying it.\n modifying_pip = pip_req.satisfied_by is None\n 
protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)\n\n reqs_to_build = [\n r\n for r in requirement_set.requirements_to_install\n if should_build_for_install_command(r)\n ]\n\n _, build_failures = build(\n reqs_to_build,\n wheel_cache=wheel_cache,\n verify=True,\n build_options=[],\n global_options=global_options,\n )\n\n if build_failures:\n raise InstallationError(\n "Failed to build installable wheels for some "\n "pyproject.toml based projects ({})".format(\n ", ".join(r.name for r in build_failures) # type: ignore\n )\n )\n\n to_install = resolver.get_installation_order(requirement_set)\n\n # Check for conflicts in the package set we're installing.\n conflicts: Optional[ConflictDetails] = None\n should_warn_about_conflicts = (\n not options.ignore_dependencies and options.warn_about_conflicts\n )\n if should_warn_about_conflicts:\n conflicts = self._determine_conflicts(to_install)\n\n # Don't warn about script install locations if\n # --target or --prefix has been specified\n warn_script_location = options.warn_script_location\n if options.target_dir or options.prefix_path:\n warn_script_location = False\n\n installed = install_given_reqs(\n to_install,\n global_options,\n root=options.root_path,\n home=target_temp_dir_path,\n prefix=options.prefix_path,\n warn_script_location=warn_script_location,\n use_user_site=options.use_user_site,\n pycompile=options.compile,\n progress_bar=options.progress_bar,\n )\n\n lib_locations = get_lib_location_guesses(\n user=options.use_user_site,\n home=target_temp_dir_path,\n root=options.root_path,\n prefix=options.prefix_path,\n isolated=options.isolated_mode,\n )\n env = get_environment(lib_locations)\n\n # Display a summary of installed packages, with extra care to\n # display a package name as it was requested by the user.\n installed.sort(key=operator.attrgetter("name"))\n summary = []\n installed_versions = {}\n for distribution in env.iter_all_distributions():\n 
installed_versions[distribution.canonical_name] = distribution.version\n for package in installed:\n display_name = package.name\n version = installed_versions.get(canonicalize_name(display_name), None)\n if version:\n text = f"{display_name}-{version}"\n else:\n text = display_name\n summary.append(text)\n\n if conflicts is not None:\n self._warn_about_conflicts(\n conflicts,\n resolver_variant=self.determine_resolver_variant(options),\n )\n\n installed_desc = " ".join(summary)\n if installed_desc:\n write_output(\n "Successfully installed %s",\n installed_desc,\n )\n except OSError as error:\n show_traceback = self.verbosity >= 1\n\n message = create_os_error_message(\n error,\n show_traceback,\n options.use_user_site,\n )\n logger.error(message, exc_info=show_traceback)\n\n return ERROR\n\n if options.target_dir:\n assert target_temp_dir\n self._handle_target_dir(\n options.target_dir, target_temp_dir, options.upgrade\n )\n if options.root_user_action == "warn":\n warn_if_run_as_root()\n return SUCCESS\n\n def _handle_target_dir(\n self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool\n ) -> None:\n ensure_dir(target_dir)\n\n # Checking both purelib and platlib directories for installed\n # packages to be moved to target directory\n lib_dir_list = []\n\n # Checking both purelib and platlib directories for installed\n # packages to be moved to target directory\n scheme = get_scheme("", home=target_temp_dir.path)\n purelib_dir = scheme.purelib\n platlib_dir = scheme.platlib\n data_dir = scheme.data\n\n if os.path.exists(purelib_dir):\n lib_dir_list.append(purelib_dir)\n if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:\n lib_dir_list.append(platlib_dir)\n if os.path.exists(data_dir):\n lib_dir_list.append(data_dir)\n\n for lib_dir in lib_dir_list:\n for item in os.listdir(lib_dir):\n if lib_dir == data_dir:\n ddir = os.path.join(data_dir, item)\n if any(s.startswith(ddir) for s in lib_dir_list[:-1]):\n continue\n target_item_dir = 
os.path.join(target_dir, item)\n if os.path.exists(target_item_dir):\n if not upgrade:\n logger.warning(\n "Target directory %s already exists. Specify "\n "--upgrade to force replacement.",\n target_item_dir,\n )\n continue\n if os.path.islink(target_item_dir):\n logger.warning(\n "Target directory %s already exists and is "\n "a link. pip will not automatically replace "\n "links, please remove if replacement is "\n "desired.",\n target_item_dir,\n )\n continue\n if os.path.isdir(target_item_dir):\n shutil.rmtree(target_item_dir)\n else:\n os.remove(target_item_dir)\n\n shutil.move(os.path.join(lib_dir, item), target_item_dir)\n\n def _determine_conflicts(\n self, to_install: List[InstallRequirement]\n ) -> Optional[ConflictDetails]:\n try:\n return check_install_conflicts(to_install)\n except Exception:\n logger.exception(\n "Error while checking for conflicts. Please file an issue on "\n "pip's issue tracker: https://github.com/pypa/pip/issues/new"\n )\n return None\n\n def _warn_about_conflicts(\n self, conflict_details: ConflictDetails, resolver_variant: str\n ) -> None:\n package_set, (missing, conflicting) = conflict_details\n if not missing and not conflicting:\n return\n\n parts: List[str] = []\n if resolver_variant == "legacy":\n parts.append(\n "pip's legacy dependency resolver does not consider dependency "\n "conflicts when selecting packages. This behaviour is the "\n "source of the following dependency conflicts."\n )\n else:\n assert resolver_variant == "resolvelib"\n parts.append(\n "pip's dependency resolver does not currently take into account "\n "all the packages that are installed. 
This behaviour is the "\n "source of the following dependency conflicts."\n )\n\n # NOTE: There is some duplication here, with commands/check.py\n for project_name in missing:\n version = package_set[project_name][0]\n for dependency in missing[project_name]:\n message = (\n f"{project_name} {version} requires {dependency[1]}, "\n "which is not installed."\n )\n parts.append(message)\n\n for project_name in conflicting:\n version = package_set[project_name][0]\n for dep_name, dep_version, req in conflicting[project_name]:\n message = (\n "{name} {version} requires {requirement}, but {you} have "\n "{dep_name} {dep_version} which is incompatible."\n ).format(\n name=project_name,\n version=version,\n requirement=req,\n dep_name=dep_name,\n dep_version=dep_version,\n you=("you" if resolver_variant == "resolvelib" else "you'll"),\n )\n parts.append(message)\n\n logger.critical("\n".join(parts))\n\n\ndef get_lib_location_guesses(\n user: bool = False,\n home: Optional[str] = None,\n root: Optional[str] = None,\n isolated: bool = False,\n prefix: Optional[str] = None,\n) -> List[str]:\n scheme = get_scheme(\n "",\n user=user,\n home=home,\n root=root,\n isolated=isolated,\n prefix=prefix,\n )\n return [scheme.purelib, scheme.platlib]\n\n\ndef site_packages_writable(root: Optional[str], isolated: bool) -> bool:\n return all(\n test_writable_dir(d)\n for d in set(get_lib_location_guesses(root=root, isolated=isolated))\n )\n\n\ndef decide_user_install(\n use_user_site: Optional[bool],\n prefix_path: Optional[str] = None,\n target_dir: Optional[str] = None,\n root_path: Optional[str] = None,\n isolated_mode: bool = False,\n) -> bool:\n """Determine whether to do a user install based on the input options.\n\n If use_user_site is False, no additional checks are done.\n If use_user_site is True, it is checked for compatibility with other\n options.\n If use_user_site is None, the default behaviour depends on the environment,\n which is provided by the other arguments.\n """\n 
# In some cases (config from tox), use_user_site can be set to an integer\n # rather than a bool, which 'use_user_site is False' wouldn't catch.\n if (use_user_site is not None) and (not use_user_site):\n logger.debug("Non-user install by explicit request")\n return False\n\n if use_user_site:\n if prefix_path:\n raise CommandError(\n "Can not combine '--user' and '--prefix' as they imply "\n "different installation locations"\n )\n if virtualenv_no_global():\n raise InstallationError(\n "Can not perform a '--user' install. User site-packages "\n "are not visible in this virtualenv."\n )\n logger.debug("User install by explicit request")\n return True\n\n # If we are here, user installs have not been explicitly requested/avoided\n assert use_user_site is None\n\n # user install incompatible with --prefix/--target\n if prefix_path or target_dir:\n logger.debug("Non-user install due to --prefix or --target option")\n return False\n\n # If user installs are not enabled, choose a non-user install\n if not site.ENABLE_USER_SITE:\n logger.debug("Non-user install because user site-packages disabled")\n return False\n\n # If we have permission for a non-user install, do that,\n # otherwise do a user install.\n if site_packages_writable(root=root_path, isolated=isolated_mode):\n logger.debug("Non-user install because site-packages writeable")\n return False\n\n logger.info(\n "Defaulting to user installation because normal site-packages "\n "is not writeable"\n )\n return True\n\n\ndef create_os_error_message(\n error: OSError, show_traceback: bool, using_user_site: bool\n) -> str:\n """Format an error message for an OSError\n\n It may occur anytime during the execution of the install command.\n """\n parts = []\n\n # Mention the error if we are not going to show a traceback\n parts.append("Could not install packages due to an OSError")\n if not show_traceback:\n parts.append(": ")\n parts.append(str(error))\n else:\n parts.append(".")\n\n # Spilt the error indication from 
a helper message (if any)\n parts[-1] += "\n"\n\n # Suggest useful actions to the user:\n # (1) using user site-packages or (2) verifying the permissions\n if error.errno == errno.EACCES:\n user_option_part = "Consider using the `--user` option"\n permissions_part = "Check the permissions"\n\n if not running_under_virtualenv() and not using_user_site:\n parts.extend(\n [\n user_option_part,\n " or ",\n permissions_part.lower(),\n ]\n )\n else:\n parts.append(permissions_part)\n parts.append(".\n")\n\n # Suggest to check "pip config debug" in case of invalid proxy\n if type(error) is InvalidProxyURL:\n parts.append(\n 'Consider checking your local proxy configuration with "pip config debug"'\n )\n parts.append(".\n")\n\n # Suggest the user to enable Long Paths if path length is\n # more than 260\n if (\n WINDOWS\n and error.errno == errno.ENOENT\n and error.filename\n and len(error.filename) > 260\n ):\n parts.append(\n "HINT: This error might have occurred since "\n "this system does not have Windows Long Path "\n "support enabled. You can find information on "\n "how to enable this at "\n "https://pip.pypa.io/warnings/enable-long-paths\n"\n )\n\n return "".join(parts).strip() + "\n"\n
.venv\Lib\site-packages\pip\_internal\commands\install.py
install.py
Python
29,757
0.95
0.124842
0.061429
react-lib
563
2023-08-11T22:38:57.787891
BSD-3-Clause
false
eb098f4a4d5a238ff1cd409adf6783ff
import json\nimport logging\nfrom email.parser import Parser\nfrom optparse import Values\nfrom typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast\n\nfrom pip._vendor.packaging.utils import canonicalize_name\nfrom pip._vendor.packaging.version import Version\n\nfrom pip._internal.cli import cmdoptions\nfrom pip._internal.cli.index_command import IndexGroupCommand\nfrom pip._internal.cli.status_codes import SUCCESS\nfrom pip._internal.exceptions import CommandError\nfrom pip._internal.metadata import BaseDistribution, get_environment\nfrom pip._internal.models.selection_prefs import SelectionPreferences\nfrom pip._internal.utils.compat import stdlib_pkgs\nfrom pip._internal.utils.misc import tabulate, write_output\n\nif TYPE_CHECKING:\n from pip._internal.index.package_finder import PackageFinder\n from pip._internal.network.session import PipSession\n\n class _DistWithLatestInfo(BaseDistribution):\n """Give the distribution object a couple of extra fields.\n\n These will be populated during ``get_outdated()``. 
This is dirty but\n makes the rest of the code much cleaner.\n """\n\n latest_version: Version\n latest_filetype: str\n\n _ProcessedDists = Sequence[_DistWithLatestInfo]\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass ListCommand(IndexGroupCommand):\n """\n List installed packages, including editables.\n\n Packages are listed in a case-insensitive sorted order.\n """\n\n ignore_require_venv = True\n usage = """\n %prog [options]"""\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(\n "-o",\n "--outdated",\n action="store_true",\n default=False,\n help="List outdated packages",\n )\n self.cmd_opts.add_option(\n "-u",\n "--uptodate",\n action="store_true",\n default=False,\n help="List uptodate packages",\n )\n self.cmd_opts.add_option(\n "-e",\n "--editable",\n action="store_true",\n default=False,\n help="List editable projects.",\n )\n self.cmd_opts.add_option(\n "-l",\n "--local",\n action="store_true",\n default=False,\n help=(\n "If in a virtualenv that has global access, do not list "\n "globally-installed packages."\n ),\n )\n self.cmd_opts.add_option(\n "--user",\n dest="user",\n action="store_true",\n default=False,\n help="Only output packages installed in user-site.",\n )\n self.cmd_opts.add_option(cmdoptions.list_path())\n self.cmd_opts.add_option(\n "--pre",\n action="store_true",\n default=False,\n help=(\n "Include pre-release and development versions. By default, "\n "pip only finds stable versions."\n ),\n )\n\n self.cmd_opts.add_option(\n "--format",\n action="store",\n dest="list_format",\n default="columns",\n choices=("columns", "freeze", "json"),\n help=(\n "Select the output format among: columns (default), freeze, or json. 
"\n "The 'freeze' format cannot be used with the --outdated option."\n ),\n )\n\n self.cmd_opts.add_option(\n "--not-required",\n action="store_true",\n dest="not_required",\n help="List packages that are not dependencies of installed packages.",\n )\n\n self.cmd_opts.add_option(\n "--exclude-editable",\n action="store_false",\n dest="include_editable",\n help="Exclude editable package from output.",\n )\n self.cmd_opts.add_option(\n "--include-editable",\n action="store_true",\n dest="include_editable",\n help="Include editable package in output.",\n default=True,\n )\n self.cmd_opts.add_option(cmdoptions.list_exclude())\n index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)\n\n self.parser.insert_option_group(0, index_opts)\n self.parser.insert_option_group(0, self.cmd_opts)\n\n def handle_pip_version_check(self, options: Values) -> None:\n if options.outdated or options.uptodate:\n super().handle_pip_version_check(options)\n\n def _build_package_finder(\n self, options: Values, session: "PipSession"\n ) -> "PackageFinder":\n """\n Create a package finder appropriate to this list command.\n """\n # Lazy import the heavy index modules as most list invocations won't need 'em.\n from pip._internal.index.collector import LinkCollector\n from pip._internal.index.package_finder import PackageFinder\n\n link_collector = LinkCollector.create(session, options=options)\n\n # Pass allow_yanked=False to ignore yanked versions.\n selection_prefs = SelectionPreferences(\n allow_yanked=False,\n allow_all_prereleases=options.pre,\n )\n\n return PackageFinder.create(\n link_collector=link_collector,\n selection_prefs=selection_prefs,\n )\n\n def run(self, options: Values, args: List[str]) -> int:\n if options.outdated and options.uptodate:\n raise CommandError("Options --outdated and --uptodate cannot be combined.")\n\n if options.outdated and options.list_format == "freeze":\n raise CommandError(\n "List format 'freeze' cannot be used with the 
--outdated option."\n )\n\n cmdoptions.check_list_path_option(options)\n\n skip = set(stdlib_pkgs)\n if options.excludes:\n skip.update(canonicalize_name(n) for n in options.excludes)\n\n packages: _ProcessedDists = [\n cast("_DistWithLatestInfo", d)\n for d in get_environment(options.path).iter_installed_distributions(\n local_only=options.local,\n user_only=options.user,\n editables_only=options.editable,\n include_editables=options.include_editable,\n skip=skip,\n )\n ]\n\n # get_not_required must be called firstly in order to find and\n # filter out all dependencies correctly. Otherwise a package\n # can't be identified as requirement because some parent packages\n # could be filtered out before.\n if options.not_required:\n packages = self.get_not_required(packages, options)\n\n if options.outdated:\n packages = self.get_outdated(packages, options)\n elif options.uptodate:\n packages = self.get_uptodate(packages, options)\n\n self.output_package_listing(packages, options)\n return SUCCESS\n\n def get_outdated(\n self, packages: "_ProcessedDists", options: Values\n ) -> "_ProcessedDists":\n return [\n dist\n for dist in self.iter_packages_latest_infos(packages, options)\n if dist.latest_version > dist.version\n ]\n\n def get_uptodate(\n self, packages: "_ProcessedDists", options: Values\n ) -> "_ProcessedDists":\n return [\n dist\n for dist in self.iter_packages_latest_infos(packages, options)\n if dist.latest_version == dist.version\n ]\n\n def get_not_required(\n self, packages: "_ProcessedDists", options: Values\n ) -> "_ProcessedDists":\n dep_keys = {\n canonicalize_name(dep.name)\n for dist in packages\n for dep in (dist.iter_dependencies() or ())\n }\n\n # Create a set to remove duplicate packages, and cast it to a list\n # to keep the return type consistent with get_outdated and\n # get_uptodate\n return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys})\n\n def iter_packages_latest_infos(\n self, packages: "_ProcessedDists", options: 
Values\n ) -> Generator["_DistWithLatestInfo", None, None]:\n with self._build_session(options) as session:\n finder = self._build_package_finder(options, session)\n\n def latest_info(\n dist: "_DistWithLatestInfo",\n ) -> Optional["_DistWithLatestInfo"]:\n all_candidates = finder.find_all_candidates(dist.canonical_name)\n if not options.pre:\n # Remove prereleases\n all_candidates = [\n candidate\n for candidate in all_candidates\n if not candidate.version.is_prerelease\n ]\n\n evaluator = finder.make_candidate_evaluator(\n project_name=dist.canonical_name,\n )\n best_candidate = evaluator.sort_best_candidate(all_candidates)\n if best_candidate is None:\n return None\n\n remote_version = best_candidate.version\n if best_candidate.link.is_wheel:\n typ = "wheel"\n else:\n typ = "sdist"\n dist.latest_version = remote_version\n dist.latest_filetype = typ\n return dist\n\n for dist in map(latest_info, packages):\n if dist is not None:\n yield dist\n\n def output_package_listing(\n self, packages: "_ProcessedDists", options: Values\n ) -> None:\n packages = sorted(\n packages,\n key=lambda dist: dist.canonical_name,\n )\n if options.list_format == "columns" and packages:\n data, header = format_for_columns(packages, options)\n self.output_package_listing_columns(data, header)\n elif options.list_format == "freeze":\n for dist in packages:\n if options.verbose >= 1:\n write_output(\n "%s==%s (%s)", dist.raw_name, dist.version, dist.location\n )\n else:\n write_output("%s==%s", dist.raw_name, dist.version)\n elif options.list_format == "json":\n write_output(format_for_json(packages, options))\n\n def output_package_listing_columns(\n self, data: List[List[str]], header: List[str]\n ) -> None:\n # insert the header first: we need to know the size of column names\n if len(data) > 0:\n data.insert(0, header)\n\n pkg_strings, sizes = tabulate(data)\n\n # Create and add a separator.\n if len(data) > 0:\n pkg_strings.insert(1, " ".join("-" * x for x in sizes))\n\n for val in 
pkg_strings:\n write_output(val)\n\n\ndef format_for_columns(\n pkgs: "_ProcessedDists", options: Values\n) -> Tuple[List[List[str]], List[str]]:\n """\n Convert the package data into something usable\n by output_package_listing_columns.\n """\n header = ["Package", "Version"]\n\n running_outdated = options.outdated\n if running_outdated:\n header.extend(["Latest", "Type"])\n\n def wheel_build_tag(dist: BaseDistribution) -> Optional[str]:\n try:\n wheel_file = dist.read_text("WHEEL")\n except FileNotFoundError:\n return None\n return Parser().parsestr(wheel_file).get("Build")\n\n build_tags = [wheel_build_tag(p) for p in pkgs]\n has_build_tags = any(build_tags)\n if has_build_tags:\n header.append("Build")\n\n if options.verbose >= 1:\n header.append("Location")\n if options.verbose >= 1:\n header.append("Installer")\n\n has_editables = any(x.editable for x in pkgs)\n if has_editables:\n header.append("Editable project location")\n\n data = []\n for i, proj in enumerate(pkgs):\n # if we're working on the 'outdated' list, separate out the\n # latest_version and type\n row = [proj.raw_name, proj.raw_version]\n\n if running_outdated:\n row.append(str(proj.latest_version))\n row.append(proj.latest_filetype)\n\n if has_build_tags:\n row.append(build_tags[i] or "")\n\n if has_editables:\n row.append(proj.editable_project_location or "")\n\n if options.verbose >= 1:\n row.append(proj.location or "")\n if options.verbose >= 1:\n row.append(proj.installer)\n\n data.append(row)\n\n return data, header\n\n\ndef format_for_json(packages: "_ProcessedDists", options: Values) -> str:\n data = []\n for dist in packages:\n info = {\n "name": dist.raw_name,\n "version": str(dist.version),\n }\n if options.verbose >= 1:\n info["location"] = dist.location or ""\n info["installer"] = dist.installer\n if options.outdated:\n info["latest_version"] = str(dist.latest_version)\n info["latest_filetype"] = dist.latest_filetype\n editable_project_location = dist.editable_project_location\n if 
editable_project_location:\n info["editable_project_location"] = editable_project_location\n data.append(info)\n return json.dumps(data)\n
.venv\Lib\site-packages\pip\_internal\commands\list.py
list.py
Python
13,274
0.95
0.168798
0.042424
react-lib
891
2024-06-30T03:37:47.145341
Apache-2.0
false
6002c1692b892859062c54e1b61aa5c2
import sys\nfrom optparse import Values\nfrom pathlib import Path\nfrom typing import List\n\nfrom pip._internal.cache import WheelCache\nfrom pip._internal.cli import cmdoptions\nfrom pip._internal.cli.req_command import (\n RequirementCommand,\n with_cleanup,\n)\nfrom pip._internal.cli.status_codes import SUCCESS\nfrom pip._internal.models.pylock import Pylock, is_valid_pylock_file_name\nfrom pip._internal.operations.build.build_tracker import get_build_tracker\nfrom pip._internal.req.req_install import (\n check_legacy_setup_py_options,\n)\nfrom pip._internal.utils.logging import getLogger\nfrom pip._internal.utils.misc import (\n get_pip_version,\n)\nfrom pip._internal.utils.temp_dir import TempDirectory\n\nlogger = getLogger(__name__)\n\n\nclass LockCommand(RequirementCommand):\n """\n EXPERIMENTAL - Lock packages and their dependencies from:\n\n - PyPI (and other indexes) using requirement specifiers.\n - VCS project urls.\n - Local project directories.\n - Local or remote source archives.\n\n pip also supports locking from "requirements files", which provide an easy\n way to specify a whole environment to be installed.\n\n The generated lock file is only guaranteed to be valid for the current\n python version and platform.\n """\n\n usage = """\n %prog [options] [-e] <local project path> ...\n %prog [options] <requirement specifier> [package-index-options] ...\n %prog [options] -r <requirements file> [package-index-options] ...\n %prog [options] <archive url/path> ..."""\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(\n cmdoptions.PipOption(\n "--output",\n "-o",\n dest="output_file",\n metavar="path",\n type="path",\n default="pylock.toml",\n help="Lock file name (default=pylock.toml). 
Use - for stdout.",\n )\n )\n self.cmd_opts.add_option(cmdoptions.requirements())\n self.cmd_opts.add_option(cmdoptions.constraints())\n self.cmd_opts.add_option(cmdoptions.no_deps())\n self.cmd_opts.add_option(cmdoptions.pre())\n\n self.cmd_opts.add_option(cmdoptions.editable())\n\n self.cmd_opts.add_option(cmdoptions.src())\n\n self.cmd_opts.add_option(cmdoptions.ignore_requires_python())\n self.cmd_opts.add_option(cmdoptions.no_build_isolation())\n self.cmd_opts.add_option(cmdoptions.use_pep517())\n self.cmd_opts.add_option(cmdoptions.no_use_pep517())\n self.cmd_opts.add_option(cmdoptions.check_build_deps())\n\n self.cmd_opts.add_option(cmdoptions.config_settings())\n\n self.cmd_opts.add_option(cmdoptions.no_binary())\n self.cmd_opts.add_option(cmdoptions.only_binary())\n self.cmd_opts.add_option(cmdoptions.prefer_binary())\n self.cmd_opts.add_option(cmdoptions.require_hashes())\n self.cmd_opts.add_option(cmdoptions.progress_bar())\n\n index_opts = cmdoptions.make_option_group(\n cmdoptions.index_group,\n self.parser,\n )\n\n self.parser.insert_option_group(0, index_opts)\n self.parser.insert_option_group(0, self.cmd_opts)\n\n @with_cleanup\n def run(self, options: Values, args: List[str]) -> int:\n logger.verbose("Using %s", get_pip_version())\n\n logger.warning(\n "pip lock is currently an experimental command. 
"\n "It may be removed/changed in a future release "\n "without prior warning."\n )\n\n session = self.get_default_session(options)\n\n finder = self._build_package_finder(\n options=options,\n session=session,\n ignore_requires_python=options.ignore_requires_python,\n )\n build_tracker = self.enter_context(get_build_tracker())\n\n directory = TempDirectory(\n delete=not options.no_clean,\n kind="install",\n globally_managed=True,\n )\n\n reqs = self.get_requirements(args, options, finder, session)\n check_legacy_setup_py_options(options, reqs)\n\n wheel_cache = WheelCache(options.cache_dir)\n\n # Only when installing is it permitted to use PEP 660.\n # In other circumstances (pip wheel, pip download) we generate\n # regular (i.e. non editable) metadata and wheels.\n for req in reqs:\n req.permit_editable_wheels = True\n\n preparer = self.make_requirement_preparer(\n temp_build_dir=directory,\n options=options,\n build_tracker=build_tracker,\n session=session,\n finder=finder,\n use_user_site=False,\n verbosity=self.verbosity,\n )\n resolver = self.make_resolver(\n preparer=preparer,\n finder=finder,\n options=options,\n wheel_cache=wheel_cache,\n use_user_site=False,\n ignore_installed=True,\n ignore_requires_python=options.ignore_requires_python,\n upgrade_strategy="to-satisfy-only",\n use_pep517=options.use_pep517,\n )\n\n self.trace_basic_info(finder)\n\n requirement_set = resolver.resolve(reqs, check_supported_wheels=True)\n\n if options.output_file == "-":\n base_dir = Path.cwd()\n else:\n output_file_path = Path(options.output_file)\n if not is_valid_pylock_file_name(output_file_path):\n logger.warning(\n "%s is not a valid lock file name.",\n output_file_path,\n )\n base_dir = output_file_path.parent\n pylock_toml = Pylock.from_install_requirements(\n requirement_set.requirements.values(), base_dir=base_dir\n ).as_toml()\n if options.output_file == "-":\n sys.stdout.write(pylock_toml)\n else:\n output_file_path.write_text(pylock_toml, encoding="utf-8")\n\n 
return SUCCESS\n
.venv\Lib\site-packages\pip\_internal\commands\lock.py
lock.py
Python
5,941
0.95
0.052632
0.021127
awesome-app
717
2025-03-11T03:25:44.970129
Apache-2.0
false
56444900fc6254c123e392c455e822d8
import logging\nimport shutil\nimport sys\nimport textwrap\nimport xmlrpc.client\nfrom collections import OrderedDict\nfrom optparse import Values\nfrom typing import Dict, List, Optional, TypedDict\n\nfrom pip._vendor.packaging.version import parse as parse_version\n\nfrom pip._internal.cli.base_command import Command\nfrom pip._internal.cli.req_command import SessionCommandMixin\nfrom pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS\nfrom pip._internal.exceptions import CommandError\nfrom pip._internal.metadata import get_default_environment\nfrom pip._internal.metadata.base import BaseDistribution\nfrom pip._internal.models.index import PyPI\nfrom pip._internal.network.xmlrpc import PipXmlrpcTransport\nfrom pip._internal.utils.logging import indent_log\nfrom pip._internal.utils.misc import write_output\n\n\nclass TransformedHit(TypedDict):\n name: str\n summary: str\n versions: List[str]\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass SearchCommand(Command, SessionCommandMixin):\n """Search for PyPI packages whose name or summary contains <query>."""\n\n usage = """\n %prog [options] <query>"""\n ignore_require_venv = True\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(\n "-i",\n "--index",\n dest="index",\n metavar="URL",\n default=PyPI.pypi_url,\n help="Base URL of Python Package Index (default %default)",\n )\n\n self.parser.insert_option_group(0, self.cmd_opts)\n\n def run(self, options: Values, args: List[str]) -> int:\n if not args:\n raise CommandError("Missing required argument (search query).")\n query = args\n pypi_hits = self.search(query, options)\n hits = transform_hits(pypi_hits)\n\n terminal_width = None\n if sys.stdout.isatty():\n terminal_width = shutil.get_terminal_size()[0]\n\n print_results(hits, terminal_width=terminal_width)\n if pypi_hits:\n return SUCCESS\n return NO_MATCHES_FOUND\n\n def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:\n index_url = options.index\n\n session = 
self.get_default_session(options)\n\n transport = PipXmlrpcTransport(index_url, session)\n pypi = xmlrpc.client.ServerProxy(index_url, transport)\n try:\n hits = pypi.search({"name": query, "summary": query}, "or")\n except xmlrpc.client.Fault as fault:\n message = (\n f"XMLRPC request failed [code: {fault.faultCode}]\n{fault.faultString}"\n )\n raise CommandError(message)\n assert isinstance(hits, list)\n return hits\n\n\ndef transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:\n """\n The list from pypi is really a list of versions. We want a list of\n packages with the list of versions stored inline. This converts the\n list from pypi into one we can use.\n """\n packages: Dict[str, TransformedHit] = OrderedDict()\n for hit in hits:\n name = hit["name"]\n summary = hit["summary"]\n version = hit["version"]\n\n if name not in packages.keys():\n packages[name] = {\n "name": name,\n "summary": summary,\n "versions": [version],\n }\n else:\n packages[name]["versions"].append(version)\n\n # if this is the highest version, replace summary and score\n if version == highest_version(packages[name]["versions"]):\n packages[name]["summary"] = summary\n\n return list(packages.values())\n\n\ndef print_dist_installation_info(latest: str, dist: Optional[BaseDistribution]) -> None:\n if dist is not None:\n with indent_log():\n if dist.version == latest:\n write_output("INSTALLED: %s (latest)", dist.version)\n else:\n write_output("INSTALLED: %s", dist.version)\n if parse_version(latest).pre:\n write_output(\n "LATEST: %s (pre-release; install"\n " with `pip install --pre`)",\n latest,\n )\n else:\n write_output("LATEST: %s", latest)\n\n\ndef get_installed_distribution(name: str) -> Optional[BaseDistribution]:\n env = get_default_environment()\n return env.get_distribution(name)\n\n\ndef print_results(\n hits: List["TransformedHit"],\n name_column_width: Optional[int] = None,\n terminal_width: Optional[int] = None,\n) -> None:\n if not hits:\n return\n if 
name_column_width is None:\n name_column_width = (\n max(\n [\n len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))\n for hit in hits\n ]\n )\n + 4\n )\n\n for hit in hits:\n name = hit["name"]\n summary = hit["summary"] or ""\n latest = highest_version(hit.get("versions", ["-"]))\n if terminal_width is not None:\n target_width = terminal_width - name_column_width - 5\n if target_width > 10:\n # wrap and indent summary to fit terminal\n summary_lines = textwrap.wrap(summary, target_width)\n summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)\n\n name_latest = f"{name} ({latest})"\n line = f"{name_latest:{name_column_width}} - {summary}"\n try:\n write_output(line)\n dist = get_installed_distribution(name)\n print_dist_installation_info(latest, dist)\n except UnicodeEncodeError:\n pass\n\n\ndef highest_version(versions: List[str]) -> str:\n return max(versions, key=parse_version)\n
.venv\Lib\site-packages\pip\_internal\commands\search.py
search.py
Python
5,784
0.95
0.164773
0.013889
vue-tools
601
2025-07-06T08:30:41.985091
MIT
false
92aa01f7fb344621fa80bda78c44b6b1
import logging\nimport string\nfrom optparse import Values\nfrom typing import Generator, Iterable, Iterator, List, NamedTuple, Optional\n\nfrom pip._vendor.packaging.requirements import InvalidRequirement\nfrom pip._vendor.packaging.utils import canonicalize_name\n\nfrom pip._internal.cli.base_command import Command\nfrom pip._internal.cli.status_codes import ERROR, SUCCESS\nfrom pip._internal.metadata import BaseDistribution, get_default_environment\nfrom pip._internal.utils.misc import write_output\n\nlogger = logging.getLogger(__name__)\n\n\ndef normalize_project_url_label(label: str) -> str:\n # This logic is from PEP 753 (Well-known Project URLs in Metadata).\n chars_to_remove = string.punctuation + string.whitespace\n removal_map = str.maketrans("", "", chars_to_remove)\n return label.translate(removal_map).lower()\n\n\nclass ShowCommand(Command):\n """\n Show information about one or more installed packages.\n\n The output is in RFC-compliant mail header format.\n """\n\n usage = """\n %prog [options] <package> ..."""\n ignore_require_venv = True\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(\n "-f",\n "--files",\n dest="files",\n action="store_true",\n default=False,\n help="Show the full list of installed files for each package.",\n )\n\n self.parser.insert_option_group(0, self.cmd_opts)\n\n def run(self, options: Values, args: List[str]) -> int:\n if not args:\n logger.warning("ERROR: Please provide a package name or names.")\n return ERROR\n query = args\n\n results = search_packages_info(query)\n if not print_results(\n results, list_files=options.files, verbose=options.verbose\n ):\n return ERROR\n return SUCCESS\n\n\nclass _PackageInfo(NamedTuple):\n name: str\n version: str\n location: str\n editable_project_location: Optional[str]\n requires: List[str]\n required_by: List[str]\n installer: str\n metadata_version: str\n classifiers: List[str]\n summary: str\n homepage: str\n project_urls: List[str]\n author: str\n author_email: str\n 
license: str\n license_expression: str\n entry_points: List[str]\n files: Optional[List[str]]\n\n\ndef search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:\n """\n Gather details from installed distributions. Print distribution name,\n version, location, and installed files. Installed files requires a\n pip generated 'installed-files.txt' in the distributions '.egg-info'\n directory.\n """\n env = get_default_environment()\n\n installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}\n query_names = [canonicalize_name(name) for name in query]\n missing = sorted(\n [name for name, pkg in zip(query, query_names) if pkg not in installed]\n )\n if missing:\n logger.warning("Package(s) not found: %s", ", ".join(missing))\n\n def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:\n return (\n dist.metadata["Name"] or "UNKNOWN"\n for dist in installed.values()\n if current_dist.canonical_name\n in {canonicalize_name(d.name) for d in dist.iter_dependencies()}\n )\n\n for query_name in query_names:\n try:\n dist = installed[query_name]\n except KeyError:\n continue\n\n try:\n requires = sorted(\n # Avoid duplicates in requirements (e.g. 
due to environment markers).\n {req.name for req in dist.iter_dependencies()},\n key=str.lower,\n )\n except InvalidRequirement:\n requires = sorted(dist.iter_raw_dependencies(), key=str.lower)\n\n try:\n required_by = sorted(_get_requiring_packages(dist), key=str.lower)\n except InvalidRequirement:\n required_by = ["#N/A"]\n\n try:\n entry_points_text = dist.read_text("entry_points.txt")\n entry_points = entry_points_text.splitlines(keepends=False)\n except FileNotFoundError:\n entry_points = []\n\n files_iter = dist.iter_declared_entries()\n if files_iter is None:\n files: Optional[List[str]] = None\n else:\n files = sorted(files_iter)\n\n metadata = dist.metadata\n\n project_urls = metadata.get_all("Project-URL", [])\n homepage = metadata.get("Home-page", "")\n if not homepage:\n # It's common that there is a "homepage" Project-URL, but Home-page\n # remains unset (especially as PEP 621 doesn't surface the field).\n for url in project_urls:\n url_label, url = url.split(",", maxsplit=1)\n normalized_label = normalize_project_url_label(url_label)\n if normalized_label == "homepage":\n homepage = url.strip()\n break\n\n yield _PackageInfo(\n name=dist.raw_name,\n version=dist.raw_version,\n location=dist.location or "",\n editable_project_location=dist.editable_project_location,\n requires=requires,\n required_by=required_by,\n installer=dist.installer,\n metadata_version=dist.metadata_version or "",\n classifiers=metadata.get_all("Classifier", []),\n summary=metadata.get("Summary", ""),\n homepage=homepage,\n project_urls=project_urls,\n author=metadata.get("Author", ""),\n author_email=metadata.get("Author-email", ""),\n license=metadata.get("License", ""),\n license_expression=metadata.get("License-Expression", ""),\n entry_points=entry_points,\n files=files,\n )\n\n\ndef print_results(\n distributions: Iterable[_PackageInfo],\n list_files: bool,\n verbose: bool,\n) -> bool:\n """\n Print the information from installed distributions found.\n """\n 
results_printed = False\n for i, dist in enumerate(distributions):\n results_printed = True\n if i > 0:\n write_output("---")\n\n metadata_version_tuple = tuple(map(int, dist.metadata_version.split(".")))\n\n write_output("Name: %s", dist.name)\n write_output("Version: %s", dist.version)\n write_output("Summary: %s", dist.summary)\n write_output("Home-page: %s", dist.homepage)\n write_output("Author: %s", dist.author)\n write_output("Author-email: %s", dist.author_email)\n if metadata_version_tuple >= (2, 4) and dist.license_expression:\n write_output("License-Expression: %s", dist.license_expression)\n else:\n write_output("License: %s", dist.license)\n write_output("Location: %s", dist.location)\n if dist.editable_project_location is not None:\n write_output(\n "Editable project location: %s", dist.editable_project_location\n )\n write_output("Requires: %s", ", ".join(dist.requires))\n write_output("Required-by: %s", ", ".join(dist.required_by))\n\n if verbose:\n write_output("Metadata-Version: %s", dist.metadata_version)\n write_output("Installer: %s", dist.installer)\n write_output("Classifiers:")\n for classifier in dist.classifiers:\n write_output(" %s", classifier)\n write_output("Entry-points:")\n for entry in dist.entry_points:\n write_output(" %s", entry.strip())\n write_output("Project-URLs:")\n for project_url in dist.project_urls:\n write_output(" %s", project_url)\n if list_files:\n write_output("Files:")\n if dist.files is None:\n write_output("Cannot locate RECORD or installed-files.txt")\n else:\n for line in dist.files:\n write_output(" %s", line.strip())\n return results_printed\n
.venv\Lib\site-packages\pip\_internal\commands\show.py
show.py
Python
8,028
0.95
0.175439
0.020408
node-utils
212
2023-10-05T04:46:31.642570
GPL-3.0
false
479418a1b5d0060a28449374fda1ae69
import logging\nfrom optparse import Values\nfrom typing import List\n\nfrom pip._vendor.packaging.utils import canonicalize_name\n\nfrom pip._internal.cli import cmdoptions\nfrom pip._internal.cli.base_command import Command\nfrom pip._internal.cli.index_command import SessionCommandMixin\nfrom pip._internal.cli.status_codes import SUCCESS\nfrom pip._internal.exceptions import InstallationError\nfrom pip._internal.req import parse_requirements\nfrom pip._internal.req.constructors import (\n install_req_from_line,\n install_req_from_parsed_requirement,\n)\nfrom pip._internal.utils.misc import (\n check_externally_managed,\n protect_pip_from_modification_on_windows,\n warn_if_run_as_root,\n)\n\nlogger = logging.getLogger(__name__)\n\n\nclass UninstallCommand(Command, SessionCommandMixin):\n """\n Uninstall packages.\n\n pip is able to uninstall most installed packages. Known exceptions are:\n\n - Pure distutils packages installed with ``python setup.py install``, which\n leave behind no metadata to determine what files were installed.\n - Script wrappers installed by ``python setup.py develop``.\n """\n\n usage = """\n %prog [options] <package> ...\n %prog [options] -r <requirements file> ..."""\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(\n "-r",\n "--requirement",\n dest="requirements",\n action="append",\n default=[],\n metavar="file",\n help=(\n "Uninstall all the packages listed in the given requirements "\n "file. 
This option can be used multiple times."\n ),\n )\n self.cmd_opts.add_option(\n "-y",\n "--yes",\n dest="yes",\n action="store_true",\n help="Don't ask for confirmation of uninstall deletions.",\n )\n self.cmd_opts.add_option(cmdoptions.root_user_action())\n self.cmd_opts.add_option(cmdoptions.override_externally_managed())\n self.parser.insert_option_group(0, self.cmd_opts)\n\n def run(self, options: Values, args: List[str]) -> int:\n session = self.get_default_session(options)\n\n reqs_to_uninstall = {}\n for name in args:\n req = install_req_from_line(\n name,\n isolated=options.isolated_mode,\n )\n if req.name:\n reqs_to_uninstall[canonicalize_name(req.name)] = req\n else:\n logger.warning(\n "Invalid requirement: %r ignored -"\n " the uninstall command expects named"\n " requirements.",\n name,\n )\n for filename in options.requirements:\n for parsed_req in parse_requirements(\n filename, options=options, session=session\n ):\n req = install_req_from_parsed_requirement(\n parsed_req, isolated=options.isolated_mode\n )\n if req.name:\n reqs_to_uninstall[canonicalize_name(req.name)] = req\n if not reqs_to_uninstall:\n raise InstallationError(\n f"You must give at least one requirement to {self.name} (see "\n f'"pip help {self.name}")'\n )\n\n if not options.override_externally_managed:\n check_externally_managed()\n\n protect_pip_from_modification_on_windows(\n modifying_pip="pip" in reqs_to_uninstall\n )\n\n for req in reqs_to_uninstall.values():\n uninstall_pathset = req.uninstall(\n auto_confirm=options.yes,\n verbose=self.verbosity > 0,\n )\n if uninstall_pathset:\n uninstall_pathset.commit()\n if options.root_user_action == "warn":\n warn_if_run_as_root()\n return SUCCESS\n
.venv\Lib\site-packages\pip\_internal\commands\uninstall.py
uninstall.py
Python
3,892
0.85
0.122807
0
node-utils
708
2024-08-29T20:32:16.170320
Apache-2.0
false
579fa6f1ac9c20a790e30c16a645ac5b
import logging\nimport os\nimport shutil\nfrom optparse import Values\nfrom typing import List\n\nfrom pip._internal.cache import WheelCache\nfrom pip._internal.cli import cmdoptions\nfrom pip._internal.cli.req_command import RequirementCommand, with_cleanup\nfrom pip._internal.cli.status_codes import SUCCESS\nfrom pip._internal.exceptions import CommandError\nfrom pip._internal.operations.build.build_tracker import get_build_tracker\nfrom pip._internal.req.req_install import (\n InstallRequirement,\n check_legacy_setup_py_options,\n)\nfrom pip._internal.utils.misc import ensure_dir, normalize_path\nfrom pip._internal.utils.temp_dir import TempDirectory\nfrom pip._internal.wheel_builder import build\n\nlogger = logging.getLogger(__name__)\n\n\nclass WheelCommand(RequirementCommand):\n """\n Build Wheel archives for your requirements and dependencies.\n\n Wheel is a built-package format, and offers the advantage of not\n recompiling your software during every install. For more details, see the\n wheel docs: https://wheel.readthedocs.io/en/latest/\n\n 'pip wheel' uses the build system interface as described here:\n https://pip.pypa.io/en/stable/reference/build-system/\n\n """\n\n usage = """\n %prog [options] <requirement specifier> ...\n %prog [options] -r <requirements file> ...\n %prog [options] [-e] <vcs project url> ...\n %prog [options] [-e] <local project path> ...\n %prog [options] <archive url/path> ..."""\n\n def add_options(self) -> None:\n self.cmd_opts.add_option(\n "-w",\n "--wheel-dir",\n dest="wheel_dir",\n metavar="dir",\n default=os.curdir,\n help=(\n "Build wheels into <dir>, where the default is the "\n "current working directory."\n ),\n )\n self.cmd_opts.add_option(cmdoptions.no_binary())\n self.cmd_opts.add_option(cmdoptions.only_binary())\n self.cmd_opts.add_option(cmdoptions.prefer_binary())\n self.cmd_opts.add_option(cmdoptions.no_build_isolation())\n self.cmd_opts.add_option(cmdoptions.use_pep517())\n 
self.cmd_opts.add_option(cmdoptions.no_use_pep517())\n self.cmd_opts.add_option(cmdoptions.check_build_deps())\n self.cmd_opts.add_option(cmdoptions.constraints())\n self.cmd_opts.add_option(cmdoptions.editable())\n self.cmd_opts.add_option(cmdoptions.requirements())\n self.cmd_opts.add_option(cmdoptions.src())\n self.cmd_opts.add_option(cmdoptions.ignore_requires_python())\n self.cmd_opts.add_option(cmdoptions.no_deps())\n self.cmd_opts.add_option(cmdoptions.progress_bar())\n\n self.cmd_opts.add_option(\n "--no-verify",\n dest="no_verify",\n action="store_true",\n default=False,\n help="Don't verify if built wheel is valid.",\n )\n\n self.cmd_opts.add_option(cmdoptions.config_settings())\n self.cmd_opts.add_option(cmdoptions.build_options())\n self.cmd_opts.add_option(cmdoptions.global_options())\n\n self.cmd_opts.add_option(\n "--pre",\n action="store_true",\n default=False,\n help=(\n "Include pre-release and development versions. By default, "\n "pip only finds stable versions."\n ),\n )\n\n self.cmd_opts.add_option(cmdoptions.require_hashes())\n\n index_opts = cmdoptions.make_option_group(\n cmdoptions.index_group,\n self.parser,\n )\n\n self.parser.insert_option_group(0, index_opts)\n self.parser.insert_option_group(0, self.cmd_opts)\n\n @with_cleanup\n def run(self, options: Values, args: List[str]) -> int:\n session = self.get_default_session(options)\n\n finder = self._build_package_finder(options, session)\n\n options.wheel_dir = normalize_path(options.wheel_dir)\n ensure_dir(options.wheel_dir)\n\n build_tracker = self.enter_context(get_build_tracker())\n\n directory = TempDirectory(\n delete=not options.no_clean,\n kind="wheel",\n globally_managed=True,\n )\n\n reqs = self.get_requirements(args, options, finder, session)\n check_legacy_setup_py_options(options, reqs)\n\n wheel_cache = WheelCache(options.cache_dir)\n\n preparer = self.make_requirement_preparer(\n temp_build_dir=directory,\n options=options,\n build_tracker=build_tracker,\n 
session=session,\n finder=finder,\n download_dir=options.wheel_dir,\n use_user_site=False,\n verbosity=self.verbosity,\n )\n\n resolver = self.make_resolver(\n preparer=preparer,\n finder=finder,\n options=options,\n wheel_cache=wheel_cache,\n ignore_requires_python=options.ignore_requires_python,\n use_pep517=options.use_pep517,\n )\n\n self.trace_basic_info(finder)\n\n requirement_set = resolver.resolve(reqs, check_supported_wheels=True)\n\n reqs_to_build: List[InstallRequirement] = []\n for req in requirement_set.requirements.values():\n if req.is_wheel:\n preparer.save_linked_requirement(req)\n else:\n reqs_to_build.append(req)\n\n preparer.prepare_linked_requirements_more(requirement_set.requirements.values())\n\n # build wheels\n build_successes, build_failures = build(\n reqs_to_build,\n wheel_cache=wheel_cache,\n verify=(not options.no_verify),\n build_options=options.build_options or [],\n global_options=options.global_options or [],\n )\n for req in build_successes:\n assert req.link and req.link.is_wheel\n assert req.local_file_path\n # copy from cache to target directory\n try:\n shutil.copy(req.local_file_path, options.wheel_dir)\n except OSError as e:\n logger.warning(\n "Building wheel for %s failed: %s",\n req.name,\n e,\n )\n build_failures.append(req)\n if len(build_failures) != 0:\n raise CommandError("Failed to build one or more wheels")\n\n return SUCCESS\n
.venv\Lib\site-packages\pip\_internal\commands\wheel.py
wheel.py
Python
6,346
0.95
0.06044
0.013158
node-utils
253
2024-12-17T13:15:26.284839
Apache-2.0
false
167c4f466b4be52e659dd3cf5d65cc29
"""\nPackage containing all pip commands\n"""\n\nimport importlib\nfrom collections import namedtuple\nfrom typing import Any, Dict, Optional\n\nfrom pip._internal.cli.base_command import Command\n\nCommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")\n\n# This dictionary does a bunch of heavy lifting for help output:\n# - Enables avoiding additional (costly) imports for presenting `--help`.\n# - The ordering matters for help display.\n#\n# Even though the module path starts with the same "pip._internal.commands"\n# prefix, the full path makes testing easier (specifically when modifying\n# `commands_dict` in test setup / teardown).\ncommands_dict: Dict[str, CommandInfo] = {\n "install": CommandInfo(\n "pip._internal.commands.install",\n "InstallCommand",\n "Install packages.",\n ),\n "lock": CommandInfo(\n "pip._internal.commands.lock",\n "LockCommand",\n "Generate a lock file.",\n ),\n "download": CommandInfo(\n "pip._internal.commands.download",\n "DownloadCommand",\n "Download packages.",\n ),\n "uninstall": CommandInfo(\n "pip._internal.commands.uninstall",\n "UninstallCommand",\n "Uninstall packages.",\n ),\n "freeze": CommandInfo(\n "pip._internal.commands.freeze",\n "FreezeCommand",\n "Output installed packages in requirements format.",\n ),\n "inspect": CommandInfo(\n "pip._internal.commands.inspect",\n "InspectCommand",\n "Inspect the python environment.",\n ),\n "list": CommandInfo(\n "pip._internal.commands.list",\n "ListCommand",\n "List installed packages.",\n ),\n "show": CommandInfo(\n "pip._internal.commands.show",\n "ShowCommand",\n "Show information about installed packages.",\n ),\n "check": CommandInfo(\n "pip._internal.commands.check",\n "CheckCommand",\n "Verify installed packages have compatible dependencies.",\n ),\n "config": CommandInfo(\n "pip._internal.commands.configuration",\n "ConfigurationCommand",\n "Manage local and global configuration.",\n ),\n "search": CommandInfo(\n "pip._internal.commands.search",\n 
"SearchCommand",\n "Search PyPI for packages.",\n ),\n "cache": CommandInfo(\n "pip._internal.commands.cache",\n "CacheCommand",\n "Inspect and manage pip's wheel cache.",\n ),\n "index": CommandInfo(\n "pip._internal.commands.index",\n "IndexCommand",\n "Inspect information available from package indexes.",\n ),\n "wheel": CommandInfo(\n "pip._internal.commands.wheel",\n "WheelCommand",\n "Build wheels from your requirements.",\n ),\n "hash": CommandInfo(\n "pip._internal.commands.hash",\n "HashCommand",\n "Compute hashes of package archives.",\n ),\n "completion": CommandInfo(\n "pip._internal.commands.completion",\n "CompletionCommand",\n "A helper command used for command completion.",\n ),\n "debug": CommandInfo(\n "pip._internal.commands.debug",\n "DebugCommand",\n "Show information useful for debugging.",\n ),\n "help": CommandInfo(\n "pip._internal.commands.help",\n "HelpCommand",\n "Show help for commands.",\n ),\n}\n\n\ndef create_command(name: str, **kwargs: Any) -> Command:\n """\n Create an instance of the Command class with the given name.\n """\n module_path, class_name, summary = commands_dict[name]\n module = importlib.import_module(module_path)\n command_class = getattr(module, class_name)\n command = command_class(name=name, summary=summary, **kwargs)\n\n return command\n\n\ndef get_similar_commands(name: str) -> Optional[str]:\n """Command name auto-correct."""\n from difflib import get_close_matches\n\n name = name.lower()\n\n close_commands = get_close_matches(name, commands_dict.keys())\n\n if close_commands:\n return close_commands[0]\n else:\n return None\n
.venv\Lib\site-packages\pip\_internal\commands\__init__.py
__init__.py
Python
4,009
0.95
0.080292
0.056
node-utils
352
2025-05-24T00:47:27.437994
Apache-2.0
false
083d6d0c2ab5aef19a72392bf84a2d91
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\cache.cpython-313.pyc
cache.cpython-313.pyc
Other
10,086
0.95
0.018349
0
awesome-app
114
2024-03-03T07:08:37.874027
Apache-2.0
false
5ad83f232bc94ae67fe438992e4cf3c9
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\check.cpython-313.pyc
check.cpython-313.pyc
Other
2,684
0.95
0
0
node-utils
36
2024-10-27T09:14:12.083549
GPL-3.0
false
f14c1e831606f1e36a503573407948fd
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\completion.cpython-313.pyc
completion.cpython-313.pyc
Other
5,497
0.95
0.125
0.048077
python-kit
257
2024-05-23T18:54:14.221287
BSD-3-Clause
false
c3c95e6799ba4144fccaf23d2ad73cca
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\configuration.cpython-313.pyc
configuration.cpython-313.pyc
Other
13,236
0.95
0.034483
0.022727
react-lib
753
2024-07-26T09:42:29.368901
Apache-2.0
false
35d43576b76975b298243eb65c22b4a0
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\debug.cpython-313.pyc
debug.cpython-313.pyc
Other
10,298
0.95
0.038835
0.01
python-kit
971
2024-02-11T22:33:47.659137
BSD-3-Clause
false
026b73ad8e6e2ac706931e249d82071b
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\download.cpython-313.pyc
download.cpython-313.pyc
Other
7,551
0.95
0
0
node-utils
847
2025-04-07T16:29:01.184515
Apache-2.0
false
5f3565a2376431c229c8cca667ac453f
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\freeze.cpython-313.pyc
freeze.cpython-313.pyc
Other
4,401
0.95
0
0
awesome-app
787
2024-03-03T16:52:12.020351
GPL-3.0
false
c2b8972e1c8b641cf2a7d75bc52c0c8b
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\hash.cpython-313.pyc
hash.cpython-313.pyc
Other
3,033
0.95
0
0
python-kit
446
2024-11-12T09:29:34.346962
GPL-3.0
false
aaeae24f345332c5e3898f048fca7283
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\help.cpython-313.pyc
help.cpython-313.pyc
Other
1,747
0.95
0.047619
0
node-utils
897
2024-10-05T18:20:26.248335
MIT
false
aad083fa4800bbcf1ea36e73e629cb38
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\index.cpython-313.pyc
index.cpython-313.pyc
Other
7,043
0.95
0.012195
0
vue-tools
394
2025-02-02T19:01:06.076069
Apache-2.0
false
06c84403c502522a64d5d6e81b3a3efa
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\inspect.cpython-313.pyc
inspect.cpython-313.pyc
Other
4,038
0.95
0
0
python-kit
331
2023-08-30T15:08:05.910083
BSD-3-Clause
false
1ab2f2d1d67a98102c7327135d123a35
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\install.cpython-313.pyc
install.cpython-313.pyc
Other
29,766
0.95
0.04375
0.003891
react-lib
518
2024-01-05T17:17:34.944177
GPL-3.0
false
7cb7f87aa98792ea7920f322313a63ab
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\list.cpython-313.pyc
list.cpython-313.pyc
Other
16,990
0.95
0
0.011696
awesome-app
440
2025-01-13T08:30:10.100591
BSD-3-Clause
false
9f360f7fa40645855b2914920aba1d89
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\lock.cpython-313.pyc
lock.cpython-313.pyc
Other
8,040
0.95
0.017241
0
node-utils
560
2025-05-20T05:47:29.913525
MIT
false
c74dd4d786808556810d9a6bf361fcad
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\search.cpython-313.pyc
search.cpython-313.pyc
Other
7,906
0.95
0.011628
0.012195
awesome-app
502
2024-09-13T18:09:34.789059
BSD-3-Clause
false
07e724ccf536cd571957dc9242f508ae
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\show.cpython-313.pyc
show.cpython-313.pyc
Other
11,484
0.95
0.007634
0.008
awesome-app
314
2023-08-19T02:46:24.067129
MIT
false
c7dcd138288a404b01ae408aa578b80e
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\uninstall.cpython-313.pyc
uninstall.cpython-313.pyc
Other
4,772
0.95
0.015152
0
vue-tools
169
2024-10-25T10:39:56.385750
MIT
false
752afcdf21627fbae7bf4329ab1f8169
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\wheel.cpython-313.pyc
wheel.cpython-313.pyc
Other
8,885
0.95
0.022059
0
awesome-app
350
2024-08-12T17:08:45.811920
BSD-3-Clause
false
07396c099f29b825e0b08d152d2ebfc8
\n\n
.venv\Lib\site-packages\pip\_internal\commands\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
4,124
0.95
0.096154
0
node-utils
94
2023-11-21T21:20:09.863185
MIT
false
80d997979e93c8d3029c83cb959a2127
import abc\nfrom typing import TYPE_CHECKING, Optional\n\nfrom pip._internal.metadata.base import BaseDistribution\nfrom pip._internal.req import InstallRequirement\n\nif TYPE_CHECKING:\n from pip._internal.index.package_finder import PackageFinder\n\n\nclass AbstractDistribution(metaclass=abc.ABCMeta):\n """A base class for handling installable artifacts.\n\n The requirements for anything installable are as follows:\n\n - we must be able to determine the requirement name\n (or we can't correctly handle the non-upgrade case).\n\n - for packages with setup requirements, we must also be able\n to determine their requirements without installing additional\n packages (for the same reason as run-time dependencies)\n\n - we must be able to create a Distribution object exposing the\n above metadata.\n\n - if we need to do work in the build tracker, we must be able to generate a unique\n string to identify the requirement in the build tracker.\n """\n\n def __init__(self, req: InstallRequirement) -> None:\n super().__init__()\n self.req = req\n\n @abc.abstractproperty\n def build_tracker_id(self) -> Optional[str]:\n """A string that uniquely identifies this requirement to the build tracker.\n\n If None, then this dist has no work to do in the build tracker, and\n ``.prepare_distribution_metadata()`` will not be called."""\n raise NotImplementedError()\n\n @abc.abstractmethod\n def get_metadata_distribution(self) -> BaseDistribution:\n raise NotImplementedError()\n\n @abc.abstractmethod\n def prepare_distribution_metadata(\n self,\n finder: "PackageFinder",\n build_isolation: bool,\n check_build_deps: bool,\n ) -> None:\n raise NotImplementedError()\n
.venv\Lib\site-packages\pip\_internal\distributions\base.py
base.py
Python
1,783
0.85
0.226415
0
vue-tools
295
2024-05-07T18:17:38.783363
Apache-2.0
false
b56cb85c7d81c388fa2e2e8eefc5aa79
from typing import Optional\n\nfrom pip._internal.distributions.base import AbstractDistribution\nfrom pip._internal.index.package_finder import PackageFinder\nfrom pip._internal.metadata import BaseDistribution\n\n\nclass InstalledDistribution(AbstractDistribution):\n """Represents an installed package.\n\n This does not need any preparation as the required information has already\n been computed.\n """\n\n @property\n def build_tracker_id(self) -> Optional[str]:\n return None\n\n def get_metadata_distribution(self) -> BaseDistribution:\n assert self.req.satisfied_by is not None, "not actually installed"\n return self.req.satisfied_by\n\n def prepare_distribution_metadata(\n self,\n finder: PackageFinder,\n build_isolation: bool,\n check_build_deps: bool,\n ) -> None:\n pass\n
.venv\Lib\site-packages\pip\_internal\distributions\installed.py
installed.py
Python
842
0.85
0.137931
0
node-utils
464
2024-02-25T17:31:09.000387
GPL-3.0
false
38f5423ba5ba35d0628bf5abd595a207
import logging\nfrom typing import TYPE_CHECKING, Iterable, Optional, Set, Tuple\n\nfrom pip._internal.build_env import BuildEnvironment\nfrom pip._internal.distributions.base import AbstractDistribution\nfrom pip._internal.exceptions import InstallationError\nfrom pip._internal.metadata import BaseDistribution\nfrom pip._internal.utils.subprocess import runner_with_spinner_message\n\nif TYPE_CHECKING:\n from pip._internal.index.package_finder import PackageFinder\n\nlogger = logging.getLogger(__name__)\n\n\nclass SourceDistribution(AbstractDistribution):\n """Represents a source distribution.\n\n The preparation step for these needs metadata for the packages to be\n generated, either using PEP 517 or using the legacy `setup.py egg_info`.\n """\n\n @property\n def build_tracker_id(self) -> Optional[str]:\n """Identify this requirement uniquely by its link."""\n assert self.req.link\n return self.req.link.url_without_fragment\n\n def get_metadata_distribution(self) -> BaseDistribution:\n return self.req.get_dist()\n\n def prepare_distribution_metadata(\n self,\n finder: "PackageFinder",\n build_isolation: bool,\n check_build_deps: bool,\n ) -> None:\n # Load pyproject.toml, to determine whether PEP 517 is to be used\n self.req.load_pyproject_toml()\n\n # Set up the build isolation, if this requirement should be isolated\n should_isolate = self.req.use_pep517 and build_isolation\n if should_isolate:\n # Setup an isolated environment and install the build backend static\n # requirements in it.\n self._prepare_build_backend(finder)\n # Check that if the requirement is editable, it either supports PEP 660 or\n # has a setup.py or a setup.cfg. This cannot be done earlier because we need\n # to setup the build backend to verify it supports build_editable, nor can\n # it be done later, because we want to avoid installing build requirements\n # needlessly. 
Doing it here also works around setuptools generating\n # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory\n # without setup.py nor setup.cfg.\n self.req.isolated_editable_sanity_check()\n # Install the dynamic build requirements.\n self._install_build_reqs(finder)\n # Check if the current environment provides build dependencies\n should_check_deps = self.req.use_pep517 and check_build_deps\n if should_check_deps:\n pyproject_requires = self.req.pyproject_requires\n assert pyproject_requires is not None\n conflicting, missing = self.req.build_env.check_requirements(\n pyproject_requires\n )\n if conflicting:\n self._raise_conflicts("the backend dependencies", conflicting)\n if missing:\n self._raise_missing_reqs(missing)\n self.req.prepare_metadata()\n\n def _prepare_build_backend(self, finder: "PackageFinder") -> None:\n # Isolate in a BuildEnvironment and install the build-time\n # requirements.\n pyproject_requires = self.req.pyproject_requires\n assert pyproject_requires is not None\n\n self.req.build_env = BuildEnvironment()\n self.req.build_env.install_requirements(\n finder, pyproject_requires, "overlay", kind="build dependencies"\n )\n conflicting, missing = self.req.build_env.check_requirements(\n self.req.requirements_to_check\n )\n if conflicting:\n self._raise_conflicts("PEP 517/518 supported requirements", conflicting)\n if missing:\n logger.warning(\n "Missing build requirements in pyproject.toml for %s.",\n self.req,\n )\n logger.warning(\n "The project does not specify a build backend, and "\n "pip cannot fall back to setuptools without %s.",\n " and ".join(map(repr, sorted(missing))),\n )\n\n def _get_build_requires_wheel(self) -> Iterable[str]:\n with self.req.build_env:\n runner = runner_with_spinner_message("Getting requirements to build wheel")\n backend = self.req.pep517_backend\n assert backend is not None\n with backend.subprocess_runner(runner):\n return backend.get_requires_for_build_wheel()\n\n def 
_get_build_requires_editable(self) -> Iterable[str]:\n with self.req.build_env:\n runner = runner_with_spinner_message(\n "Getting requirements to build editable"\n )\n backend = self.req.pep517_backend\n assert backend is not None\n with backend.subprocess_runner(runner):\n return backend.get_requires_for_build_editable()\n\n def _install_build_reqs(self, finder: "PackageFinder") -> None:\n # Install any extra build dependencies that the backend requests.\n # This must be done in a second pass, as the pyproject.toml\n # dependencies must be installed before we can call the backend.\n if (\n self.req.editable\n and self.req.permit_editable_wheels\n and self.req.supports_pyproject_editable\n ):\n build_reqs = self._get_build_requires_editable()\n else:\n build_reqs = self._get_build_requires_wheel()\n conflicting, missing = self.req.build_env.check_requirements(build_reqs)\n if conflicting:\n self._raise_conflicts("the backend dependencies", conflicting)\n self.req.build_env.install_requirements(\n finder, missing, "normal", kind="backend dependencies"\n )\n\n def _raise_conflicts(\n self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]\n ) -> None:\n format_string = (\n "Some build dependencies for {requirement} "\n "conflict with {conflicting_with}: {description}."\n )\n error_message = format_string.format(\n requirement=self.req,\n conflicting_with=conflicting_with,\n description=", ".join(\n f"{installed} is incompatible with {wanted}"\n for installed, wanted in sorted(conflicting_reqs)\n ),\n )\n raise InstallationError(error_message)\n\n def _raise_missing_reqs(self, missing: Set[str]) -> None:\n format_string = (\n "Some build dependencies for {requirement} are missing: {missing}."\n )\n error_message = format_string.format(\n requirement=self.req, missing=", ".join(map(repr, sorted(missing)))\n )\n raise InstallationError(error_message)\n
.venv\Lib\site-packages\pip\_internal\distributions\sdist.py
sdist.py
Python
6,751
0.95
0.177215
0.12766
vue-tools
36
2023-09-18T09:20:23.890445
BSD-3-Clause
false
f23617dae5ef71d2703fe97d8a28be63
from typing import TYPE_CHECKING, Optional\n\nfrom pip._vendor.packaging.utils import canonicalize_name\n\nfrom pip._internal.distributions.base import AbstractDistribution\nfrom pip._internal.metadata import (\n BaseDistribution,\n FilesystemWheel,\n get_wheel_distribution,\n)\n\nif TYPE_CHECKING:\n from pip._internal.index.package_finder import PackageFinder\n\n\nclass WheelDistribution(AbstractDistribution):\n """Represents a wheel distribution.\n\n This does not need any preparation as wheels can be directly unpacked.\n """\n\n @property\n def build_tracker_id(self) -> Optional[str]:\n return None\n\n def get_metadata_distribution(self) -> BaseDistribution:\n """Loads the metadata from the wheel file into memory and returns a\n Distribution that uses it, not relying on the wheel file or\n requirement.\n """\n assert self.req.local_file_path, "Set as part of preparation during download"\n assert self.req.name, "Wheels are never unnamed"\n wheel = FilesystemWheel(self.req.local_file_path)\n return get_wheel_distribution(wheel, canonicalize_name(self.req.name))\n\n def prepare_distribution_metadata(\n self,\n finder: "PackageFinder",\n build_isolation: bool,\n check_build_deps: bool,\n ) -> None:\n pass\n
.venv\Lib\site-packages\pip\_internal\distributions\wheel.py
wheel.py
Python
1,317
0.85
0.119048
0
react-lib
867
2024-09-25T08:13:36.179084
GPL-3.0
false
2fb31e6f810839162c907943a8a18f57
from pip._internal.distributions.base import AbstractDistribution\nfrom pip._internal.distributions.sdist import SourceDistribution\nfrom pip._internal.distributions.wheel import WheelDistribution\nfrom pip._internal.req.req_install import InstallRequirement\n\n\ndef make_distribution_for_install_requirement(\n install_req: InstallRequirement,\n) -> AbstractDistribution:\n """Returns a Distribution for the given InstallRequirement"""\n # Editable requirements will always be source distributions. They use the\n # legacy logic until we create a modern standard for them.\n if install_req.editable:\n return SourceDistribution(install_req)\n\n # If it's a wheel, it's a WheelDistribution\n if install_req.is_wheel:\n return WheelDistribution(install_req)\n\n # Otherwise, a SourceDistribution\n return SourceDistribution(install_req)\n
.venv\Lib\site-packages\pip\_internal\distributions\__init__.py
__init__.py
Python
858
0.95
0.238095
0.235294
vue-tools
341
2025-03-11T04:10:10.877453
MIT
false
8fbfe6a40e1f2ad53e483516eb995753
\n\n
.venv\Lib\site-packages\pip\_internal\distributions\__pycache__\base.cpython-313.pyc
base.cpython-313.pyc
Other
2,905
0.95
0.171429
0
react-lib
315
2025-02-19T21:49:45.121543
BSD-3-Clause
false
ae899fa4f3043a5ac851d5fe8ab3e6a9
\n\n
.venv\Lib\site-packages\pip\_internal\distributions\__pycache__\installed.cpython-313.pyc
installed.cpython-313.pyc
Other
1,756
0.85
0
0
awesome-app
771
2025-01-01T06:20:56.949770
MIT
false
5b32a40bd030d0a925bfcd0d67f34e5a
\n\n
.venv\Lib\site-packages\pip\_internal\distributions\__pycache__\sdist.cpython-313.pyc
sdist.cpython-313.pyc
Other
8,635
0.95
0.065789
0.014706
awesome-app
156
2024-03-19T12:14:07.771671
BSD-3-Clause
false
de469b29036fb6963631e370b593ae75
\n\n
.venv\Lib\site-packages\pip\_internal\distributions\__pycache__\wheel.cpython-313.pyc
wheel.cpython-313.pyc
Other
2,345
0.85
0
0
node-utils
825
2023-12-20T03:53:07.679208
MIT
false
3b2d98c8438fbbd908782bd230b2c17f
\n\n
.venv\Lib\site-packages\pip\_internal\distributions\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
965
0.95
0.25
0
react-lib
613
2023-09-27T11:41:01.126247
Apache-2.0
false
5da426d8f5554ad600541ad5a92a7c33
"""\nThe main purpose of this module is to expose LinkCollector.collect_sources().\n"""\n\nimport collections\nimport email.message\nimport functools\nimport itertools\nimport json\nimport logging\nimport os\nimport urllib.parse\nimport urllib.request\nfrom dataclasses import dataclass\nfrom html.parser import HTMLParser\nfrom optparse import Values\nfrom typing import (\n Callable,\n Dict,\n Iterable,\n List,\n MutableMapping,\n NamedTuple,\n Optional,\n Protocol,\n Sequence,\n Tuple,\n Union,\n)\n\nfrom pip._vendor import requests\nfrom pip._vendor.requests import Response\nfrom pip._vendor.requests.exceptions import RetryError, SSLError\n\nfrom pip._internal.exceptions import NetworkConnectionError\nfrom pip._internal.models.link import Link\nfrom pip._internal.models.search_scope import SearchScope\nfrom pip._internal.network.session import PipSession\nfrom pip._internal.network.utils import raise_for_status\nfrom pip._internal.utils.filetypes import is_archive_file\nfrom pip._internal.utils.misc import redact_auth_from_url\nfrom pip._internal.vcs import vcs\n\nfrom .sources import CandidatesFromPage, LinkSource, build_source\n\nlogger = logging.getLogger(__name__)\n\nResponseHeaders = MutableMapping[str, str]\n\n\ndef _match_vcs_scheme(url: str) -> Optional[str]:\n """Look for VCS schemes in the URL.\n\n Returns the matched VCS scheme, or None if there's no match.\n """\n for scheme in vcs.schemes:\n if url.lower().startswith(scheme) and url[len(scheme)] in "+:":\n return scheme\n return None\n\n\nclass _NotAPIContent(Exception):\n def __init__(self, content_type: str, request_desc: str) -> None:\n super().__init__(content_type, request_desc)\n self.content_type = content_type\n self.request_desc = request_desc\n\n\ndef _ensure_api_header(response: Response) -> None:\n """\n Check the Content-Type header to ensure the response contains a Simple\n API Response.\n\n Raises `_NotAPIContent` if the content type is not a valid content-type.\n """\n content_type = 
response.headers.get("Content-Type", "Unknown")\n\n content_type_l = content_type.lower()\n if content_type_l.startswith(\n (\n "text/html",\n "application/vnd.pypi.simple.v1+html",\n "application/vnd.pypi.simple.v1+json",\n )\n ):\n return\n\n raise _NotAPIContent(content_type, response.request.method)\n\n\nclass _NotHTTP(Exception):\n pass\n\n\ndef _ensure_api_response(url: str, session: PipSession) -> None:\n """\n Send a HEAD request to the URL, and ensure the response contains a simple\n API Response.\n\n Raises `_NotHTTP` if the URL is not available for a HEAD request, or\n `_NotAPIContent` if the content type is not a valid content type.\n """\n scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)\n if scheme not in {"http", "https"}:\n raise _NotHTTP()\n\n resp = session.head(url, allow_redirects=True)\n raise_for_status(resp)\n\n _ensure_api_header(resp)\n\n\ndef _get_simple_response(url: str, session: PipSession) -> Response:\n """Access an Simple API response with GET, and return the response.\n\n This consists of three parts:\n\n 1. If the URL looks suspiciously like an archive, send a HEAD first to\n check the Content-Type is HTML or Simple API, to avoid downloading a\n large file. Raise `_NotHTTP` if the content type cannot be determined, or\n `_NotAPIContent` if it is not HTML or a Simple API.\n 2. Actually perform the request. Raise HTTP exceptions on network failures.\n 3. 
Check the Content-Type header to make sure we got a Simple API response,\n and raise `_NotAPIContent` otherwise.\n """\n if is_archive_file(Link(url).filename):\n _ensure_api_response(url, session=session)\n\n logger.debug("Getting page %s", redact_auth_from_url(url))\n\n resp = session.get(\n url,\n headers={\n "Accept": ", ".join(\n [\n "application/vnd.pypi.simple.v1+json",\n "application/vnd.pypi.simple.v1+html; q=0.1",\n "text/html; q=0.01",\n ]\n ),\n # We don't want to blindly returned cached data for\n # /simple/, because authors generally expecting that\n # twine upload && pip install will function, but if\n # they've done a pip install in the last ~10 minutes\n # it won't. Thus by setting this to zero we will not\n # blindly use any cached data, however the benefit of\n # using max-age=0 instead of no-cache, is that we will\n # still support conditional requests, so we will still\n # minimize traffic sent in cases where the page hasn't\n # changed at all, we will just always incur the round\n # trip for the conditional GET now instead of only\n # once per 10 minutes.\n # For more information, please see pypa/pip#5670.\n "Cache-Control": "max-age=0",\n },\n )\n raise_for_status(resp)\n\n # The check for archives above only works if the url ends with\n # something that looks like an archive. However that is not a\n # requirement of an url. Unless we issue a HEAD request on every\n # url we cannot know ahead of time for sure if something is a\n # Simple API response or not. 
However we can check after we've\n # downloaded it.\n _ensure_api_header(resp)\n\n logger.debug(\n "Fetched page %s as %s",\n redact_auth_from_url(url),\n resp.headers.get("Content-Type", "Unknown"),\n )\n\n return resp\n\n\ndef _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:\n """Determine if we have any encoding information in our headers."""\n if headers and "Content-Type" in headers:\n m = email.message.Message()\n m["content-type"] = headers["Content-Type"]\n charset = m.get_param("charset")\n if charset:\n return str(charset)\n return None\n\n\nclass CacheablePageContent:\n def __init__(self, page: "IndexContent") -> None:\n assert page.cache_link_parsing\n self.page = page\n\n def __eq__(self, other: object) -> bool:\n return isinstance(other, type(self)) and self.page.url == other.page.url\n\n def __hash__(self) -> int:\n return hash(self.page.url)\n\n\nclass ParseLinks(Protocol):\n def __call__(self, page: "IndexContent") -> Iterable[Link]: ...\n\n\ndef with_cached_index_content(fn: ParseLinks) -> ParseLinks:\n """\n Given a function that parses an Iterable[Link] from an IndexContent, cache the\n function's result (keyed by CacheablePageContent), unless the IndexContent\n `page` has `page.cache_link_parsing == False`.\n """\n\n @functools.lru_cache(maxsize=None)\n def wrapper(cacheable_page: CacheablePageContent) -> List[Link]:\n return list(fn(cacheable_page.page))\n\n @functools.wraps(fn)\n def wrapper_wrapper(page: "IndexContent") -> List[Link]:\n if page.cache_link_parsing:\n return wrapper(CacheablePageContent(page))\n return list(fn(page))\n\n return wrapper_wrapper\n\n\n@with_cached_index_content\ndef parse_links(page: "IndexContent") -> Iterable[Link]:\n """\n Parse a Simple API's Index Content, and yield its anchor elements as Link objects.\n """\n\n content_type_l = page.content_type.lower()\n if content_type_l.startswith("application/vnd.pypi.simple.v1+json"):\n data = json.loads(page.content)\n for file in 
data.get("files", []):\n link = Link.from_json(file, page.url)\n if link is None:\n continue\n yield link\n return\n\n parser = HTMLLinkParser(page.url)\n encoding = page.encoding or "utf-8"\n parser.feed(page.content.decode(encoding))\n\n url = page.url\n base_url = parser.base_url or url\n for anchor in parser.anchors:\n link = Link.from_element(anchor, page_url=url, base_url=base_url)\n if link is None:\n continue\n yield link\n\n\n@dataclass(frozen=True)\nclass IndexContent:\n """Represents one response (or page), along with its URL.\n\n :param encoding: the encoding to decode the given content.\n :param url: the URL from which the HTML was downloaded.\n :param cache_link_parsing: whether links parsed from this page's url\n should be cached. PyPI index urls should\n have this set to False, for example.\n """\n\n content: bytes\n content_type: str\n encoding: Optional[str]\n url: str\n cache_link_parsing: bool = True\n\n def __str__(self) -> str:\n return redact_auth_from_url(self.url)\n\n\nclass HTMLLinkParser(HTMLParser):\n """\n HTMLParser that keeps the first base HREF and a list of all anchor\n elements' attributes.\n """\n\n def __init__(self, url: str) -> None:\n super().__init__(convert_charrefs=True)\n\n self.url: str = url\n self.base_url: Optional[str] = None\n self.anchors: List[Dict[str, Optional[str]]] = []\n\n def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:\n if tag == "base" and self.base_url is None:\n href = self.get_href(attrs)\n if href is not None:\n self.base_url = href\n elif tag == "a":\n self.anchors.append(dict(attrs))\n\n def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:\n for name, value in attrs:\n if name == "href":\n return value\n return None\n\n\ndef _handle_get_simple_fail(\n link: Link,\n reason: Union[str, Exception],\n meth: Optional[Callable[..., None]] = None,\n) -> None:\n if meth is None:\n meth = logger.debug\n meth("Could not fetch URL %s: %s - skipping", 
link, reason)\n\n\ndef _make_index_content(\n response: Response, cache_link_parsing: bool = True\n) -> IndexContent:\n encoding = _get_encoding_from_headers(response.headers)\n return IndexContent(\n response.content,\n response.headers["Content-Type"],\n encoding=encoding,\n url=response.url,\n cache_link_parsing=cache_link_parsing,\n )\n\n\ndef _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]:\n url = link.url.split("#", 1)[0]\n\n # Check for VCS schemes that do not support lookup as web pages.\n vcs_scheme = _match_vcs_scheme(url)\n if vcs_scheme:\n logger.warning(\n "Cannot look at %s URL %s because it does not support lookup as web pages.",\n vcs_scheme,\n link,\n )\n return None\n\n # Tack index.html onto file:// URLs that point to directories\n scheme, _, path, _, _, _ = urllib.parse.urlparse(url)\n if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):\n # add trailing slash if not present so urljoin doesn't trim\n # final segment\n if not url.endswith("/"):\n url += "/"\n # TODO: In the future, it would be nice if pip supported PEP 691\n # style responses in the file:// URLs, however there's no\n # standard file extension for application/vnd.pypi.simple.v1+json\n # so we'll need to come up with something on our own.\n url = urllib.parse.urljoin(url, "index.html")\n logger.debug(" file: URL is directory, getting %s", url)\n\n try:\n resp = _get_simple_response(url, session=session)\n except _NotHTTP:\n logger.warning(\n "Skipping page %s because it looks like an archive, and cannot "\n "be checked by a HTTP HEAD request.",\n link,\n )\n except _NotAPIContent as exc:\n logger.warning(\n "Skipping page %s because the %s request got Content-Type: %s. 
"\n "The only supported Content-Types are application/vnd.pypi.simple.v1+json, "\n "application/vnd.pypi.simple.v1+html, and text/html",\n link,\n exc.request_desc,\n exc.content_type,\n )\n except NetworkConnectionError as exc:\n _handle_get_simple_fail(link, exc)\n except RetryError as exc:\n _handle_get_simple_fail(link, exc)\n except SSLError as exc:\n reason = "There was a problem confirming the ssl certificate: "\n reason += str(exc)\n _handle_get_simple_fail(link, reason, meth=logger.info)\n except requests.ConnectionError as exc:\n _handle_get_simple_fail(link, f"connection error: {exc}")\n except requests.Timeout:\n _handle_get_simple_fail(link, "timed out")\n else:\n return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing)\n return None\n\n\nclass CollectedSources(NamedTuple):\n find_links: Sequence[Optional[LinkSource]]\n index_urls: Sequence[Optional[LinkSource]]\n\n\nclass LinkCollector:\n """\n Responsible for collecting Link objects from all configured locations,\n making network requests as needed.\n\n The class's main method is its collect_sources() method.\n """\n\n def __init__(\n self,\n session: PipSession,\n search_scope: SearchScope,\n ) -> None:\n self.search_scope = search_scope\n self.session = session\n\n @classmethod\n def create(\n cls,\n session: PipSession,\n options: Values,\n suppress_no_index: bool = False,\n ) -> "LinkCollector":\n """\n :param session: The Session to use to make requests.\n :param suppress_no_index: Whether to ignore the --no-index option\n when constructing the SearchScope object.\n """\n index_urls = [options.index_url] + options.extra_index_urls\n if options.no_index and not suppress_no_index:\n logger.debug(\n "Ignoring indexes: %s",\n ",".join(redact_auth_from_url(url) for url in index_urls),\n )\n index_urls = []\n\n # Make sure find_links is a list before passing to create().\n find_links = options.find_links or []\n\n search_scope = SearchScope.create(\n find_links=find_links,\n 
index_urls=index_urls,\n no_index=options.no_index,\n )\n link_collector = LinkCollector(\n session=session,\n search_scope=search_scope,\n )\n return link_collector\n\n @property\n def find_links(self) -> List[str]:\n return self.search_scope.find_links\n\n def fetch_response(self, location: Link) -> Optional[IndexContent]:\n """\n Fetch an HTML page containing package links.\n """\n return _get_index_content(location, session=self.session)\n\n def collect_sources(\n self,\n project_name: str,\n candidates_from_page: CandidatesFromPage,\n ) -> CollectedSources:\n # The OrderedDict calls deduplicate sources by URL.\n index_url_sources = collections.OrderedDict(\n build_source(\n loc,\n candidates_from_page=candidates_from_page,\n page_validator=self.session.is_secure_origin,\n expand_dir=False,\n cache_link_parsing=False,\n project_name=project_name,\n )\n for loc in self.search_scope.get_index_urls_locations(project_name)\n ).values()\n find_links_sources = collections.OrderedDict(\n build_source(\n loc,\n candidates_from_page=candidates_from_page,\n page_validator=self.session.is_secure_origin,\n expand_dir=True,\n cache_link_parsing=True,\n project_name=project_name,\n )\n for loc in self.find_links\n ).values()\n\n if logger.isEnabledFor(logging.DEBUG):\n lines = [\n f"* {s.link}"\n for s in itertools.chain(find_links_sources, index_url_sources)\n if s is not None and s.link is not None\n ]\n lines = [\n f"{len(lines)} location(s) to search "\n f"for versions of {project_name}:"\n ] + lines\n logger.debug("\n".join(lines))\n\n return CollectedSources(\n find_links=list(find_links_sources),\n index_urls=list(index_url_sources),\n )\n
.venv\Lib\site-packages\pip\_internal\index\collector.py
collector.py
Python
16,265
0.95
0.182186
0.070732
python-kit
339
2025-03-04T07:59:16.728548
BSD-3-Clause
false
45293a6b89a0943c30b6191584f99c04
"""Routines related to PyPI, indexes"""\n\nimport enum\nimport functools\nimport itertools\nimport logging\nimport re\nfrom dataclasses import dataclass\nfrom typing import (\n TYPE_CHECKING,\n Dict,\n FrozenSet,\n Iterable,\n List,\n Optional,\n Set,\n Tuple,\n Union,\n)\n\nfrom pip._vendor.packaging import specifiers\nfrom pip._vendor.packaging.tags import Tag\nfrom pip._vendor.packaging.utils import canonicalize_name\nfrom pip._vendor.packaging.version import InvalidVersion, _BaseVersion\nfrom pip._vendor.packaging.version import parse as parse_version\n\nfrom pip._internal.exceptions import (\n BestVersionAlreadyInstalled,\n DistributionNotFound,\n InvalidWheelFilename,\n UnsupportedWheel,\n)\nfrom pip._internal.index.collector import LinkCollector, parse_links\nfrom pip._internal.models.candidate import InstallationCandidate\nfrom pip._internal.models.format_control import FormatControl\nfrom pip._internal.models.link import Link\nfrom pip._internal.models.search_scope import SearchScope\nfrom pip._internal.models.selection_prefs import SelectionPreferences\nfrom pip._internal.models.target_python import TargetPython\nfrom pip._internal.models.wheel import Wheel\nfrom pip._internal.req import InstallRequirement\nfrom pip._internal.utils._log import getLogger\nfrom pip._internal.utils.filetypes import WHEEL_EXTENSION\nfrom pip._internal.utils.hashes import Hashes\nfrom pip._internal.utils.logging import indent_log\nfrom pip._internal.utils.misc import build_netloc\nfrom pip._internal.utils.packaging import check_requires_python\nfrom pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS\n\nif TYPE_CHECKING:\n from pip._vendor.typing_extensions import TypeGuard\n\n__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]\n\n\nlogger = getLogger(__name__)\n\nBuildTag = Union[Tuple[()], Tuple[int, str]]\nCandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]\n\n\ndef _check_link_requires_python(\n link: Link,\n 
version_info: Tuple[int, int, int],\n ignore_requires_python: bool = False,\n) -> bool:\n """\n Return whether the given Python version is compatible with a link's\n "Requires-Python" value.\n\n :param version_info: A 3-tuple of ints representing the Python\n major-minor-micro version to check.\n :param ignore_requires_python: Whether to ignore the "Requires-Python"\n value if the given Python version isn't compatible.\n """\n try:\n is_compatible = check_requires_python(\n link.requires_python,\n version_info=version_info,\n )\n except specifiers.InvalidSpecifier:\n logger.debug(\n "Ignoring invalid Requires-Python (%r) for link: %s",\n link.requires_python,\n link,\n )\n else:\n if not is_compatible:\n version = ".".join(map(str, version_info))\n if not ignore_requires_python:\n logger.verbose(\n "Link requires a different Python (%s not in: %r): %s",\n version,\n link.requires_python,\n link,\n )\n return False\n\n logger.debug(\n "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",\n version,\n link.requires_python,\n link,\n )\n\n return True\n\n\nclass LinkType(enum.Enum):\n candidate = enum.auto()\n different_project = enum.auto()\n yanked = enum.auto()\n format_unsupported = enum.auto()\n format_invalid = enum.auto()\n platform_mismatch = enum.auto()\n requires_python_mismatch = enum.auto()\n\n\nclass LinkEvaluator:\n """\n Responsible for evaluating links for a particular project.\n """\n\n _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")\n\n # Don't include an allow_yanked default value to make sure each call\n # site considers whether yanked releases are allowed. 
This also causes\n # that decision to be made explicit in the calling code, which helps\n # people when reading the code.\n def __init__(\n self,\n project_name: str,\n canonical_name: str,\n formats: FrozenSet[str],\n target_python: TargetPython,\n allow_yanked: bool,\n ignore_requires_python: Optional[bool] = None,\n ) -> None:\n """\n :param project_name: The user supplied package name.\n :param canonical_name: The canonical package name.\n :param formats: The formats allowed for this package. Should be a set\n with 'binary' or 'source' or both in it.\n :param target_python: The target Python interpreter to use when\n evaluating link compatibility. This is used, for example, to\n check wheel compatibility, as well as when checking the Python\n version, e.g. the Python version embedded in a link filename\n (or egg fragment) and against an HTML link's optional PEP 503\n "data-requires-python" attribute.\n :param allow_yanked: Whether files marked as yanked (in the sense\n of PEP 592) are permitted to be candidates for install.\n :param ignore_requires_python: Whether to ignore incompatible\n PEP 503 "data-requires-python" values in HTML links. Defaults\n to False.\n """\n if ignore_requires_python is None:\n ignore_requires_python = False\n\n self._allow_yanked = allow_yanked\n self._canonical_name = canonical_name\n self._ignore_requires_python = ignore_requires_python\n self._formats = formats\n self._target_python = target_python\n\n self.project_name = project_name\n\n def evaluate_link(self, link: Link) -> Tuple[LinkType, str]:\n """\n Determine whether a link is a candidate for installation.\n\n :return: A tuple (result, detail), where *result* is an enum\n representing whether the evaluation found a candidate, or the reason\n why one is not found. 
If a candidate is found, *detail* will be the\n candidate's version string; if one is not found, it contains the\n reason the link fails to qualify.\n """\n version = None\n if link.is_yanked and not self._allow_yanked:\n reason = link.yanked_reason or "<none given>"\n return (LinkType.yanked, f"yanked for reason: {reason}")\n\n if link.egg_fragment:\n egg_info = link.egg_fragment\n ext = link.ext\n else:\n egg_info, ext = link.splitext()\n if not ext:\n return (LinkType.format_unsupported, "not a file")\n if ext not in SUPPORTED_EXTENSIONS:\n return (\n LinkType.format_unsupported,\n f"unsupported archive format: {ext}",\n )\n if "binary" not in self._formats and ext == WHEEL_EXTENSION:\n reason = f"No binaries permitted for {self.project_name}"\n return (LinkType.format_unsupported, reason)\n if "macosx10" in link.path and ext == ".zip":\n return (LinkType.format_unsupported, "macosx10 one")\n if ext == WHEEL_EXTENSION:\n try:\n wheel = Wheel(link.filename)\n except InvalidWheelFilename:\n return (\n LinkType.format_invalid,\n "invalid wheel filename",\n )\n if canonicalize_name(wheel.name) != self._canonical_name:\n reason = f"wrong project name (not {self.project_name})"\n return (LinkType.different_project, reason)\n\n supported_tags = self._target_python.get_unsorted_tags()\n if not wheel.supported(supported_tags):\n # Include the wheel's tags in the reason string to\n # simplify troubleshooting compatibility issues.\n file_tags = ", ".join(wheel.get_formatted_file_tags())\n reason = (\n f"none of the wheel's tags ({file_tags}) are compatible "\n f"(run pip debug --verbose to show compatible tags)"\n )\n return (LinkType.platform_mismatch, reason)\n\n version = wheel.version\n\n # This should be up by the self.ok_binary check, but see issue 2700.\n if "source" not in self._formats and ext != WHEEL_EXTENSION:\n reason = f"No sources permitted for {self.project_name}"\n return (LinkType.format_unsupported, reason)\n\n if not version:\n version = 
_extract_version_from_fragment(\n egg_info,\n self._canonical_name,\n )\n if not version:\n reason = f"Missing project version for {self.project_name}"\n return (LinkType.format_invalid, reason)\n\n match = self._py_version_re.search(version)\n if match:\n version = version[: match.start()]\n py_version = match.group(1)\n if py_version != self._target_python.py_version:\n return (\n LinkType.platform_mismatch,\n "Python version is incorrect",\n )\n\n supports_python = _check_link_requires_python(\n link,\n version_info=self._target_python.py_version_info,\n ignore_requires_python=self._ignore_requires_python,\n )\n if not supports_python:\n reason = f"{version} Requires-Python {link.requires_python}"\n return (LinkType.requires_python_mismatch, reason)\n\n logger.debug("Found link %s, version: %s", link, version)\n\n return (LinkType.candidate, version)\n\n\ndef filter_unallowed_hashes(\n candidates: List[InstallationCandidate],\n hashes: Optional[Hashes],\n project_name: str,\n) -> List[InstallationCandidate]:\n """\n Filter out candidates whose hashes aren't allowed, and return a new\n list of candidates.\n\n If at least one candidate has an allowed hash, then all candidates with\n either an allowed hash or no hash specified are returned. Otherwise,\n the given candidates are returned.\n\n Including the candidates with no hash specified when there is a match\n allows a warning to be logged if there is a more preferred candidate\n with no hash specified. Returning all candidates in the case of no\n matches lets pip report the hash of the candidate that would otherwise\n have been installed (e.g. 
permitting the user to more easily update\n their requirements file with the desired hash).\n """\n if not hashes:\n logger.debug(\n "Given no hashes to check %s links for project %r: "\n "discarding no candidates",\n len(candidates),\n project_name,\n )\n # Make sure we're not returning back the given value.\n return list(candidates)\n\n matches_or_no_digest = []\n # Collect the non-matches for logging purposes.\n non_matches = []\n match_count = 0\n for candidate in candidates:\n link = candidate.link\n if not link.has_hash:\n pass\n elif link.is_hash_allowed(hashes=hashes):\n match_count += 1\n else:\n non_matches.append(candidate)\n continue\n\n matches_or_no_digest.append(candidate)\n\n if match_count:\n filtered = matches_or_no_digest\n else:\n # Make sure we're not returning back the given value.\n filtered = list(candidates)\n\n if len(filtered) == len(candidates):\n discard_message = "discarding no candidates"\n else:\n discard_message = "discarding {} non-matches:\n {}".format(\n len(non_matches),\n "\n ".join(str(candidate.link) for candidate in non_matches),\n )\n\n logger.debug(\n "Checked %s links for project %r against %s hashes "\n "(%s matches, %s no digest): %s",\n len(candidates),\n project_name,\n hashes.digest_count,\n match_count,\n len(matches_or_no_digest) - match_count,\n discard_message,\n )\n\n return filtered\n\n\n@dataclass\nclass CandidatePreferences:\n """\n Encapsulates some of the preferences for filtering and sorting\n InstallationCandidate objects.\n """\n\n prefer_binary: bool = False\n allow_all_prereleases: bool = False\n\n\n@dataclass(frozen=True)\nclass BestCandidateResult:\n """A collection of candidates, returned by `PackageFinder.find_best_candidate`.\n\n This class is only intended to be instantiated by CandidateEvaluator's\n `compute_best_candidate()` method.\n\n :param all_candidates: A sequence of all available candidates found.\n :param applicable_candidates: The applicable candidates.\n :param best_candidate: The 
most preferred candidate found, or None\n if no applicable candidates were found.\n """\n\n all_candidates: List[InstallationCandidate]\n applicable_candidates: List[InstallationCandidate]\n best_candidate: Optional[InstallationCandidate]\n\n def __post_init__(self) -> None:\n assert set(self.applicable_candidates) <= set(self.all_candidates)\n\n if self.best_candidate is None:\n assert not self.applicable_candidates\n else:\n assert self.best_candidate in self.applicable_candidates\n\n\nclass CandidateEvaluator:\n """\n Responsible for filtering and sorting candidates for installation based\n on what tags are valid.\n """\n\n @classmethod\n def create(\n cls,\n project_name: str,\n target_python: Optional[TargetPython] = None,\n prefer_binary: bool = False,\n allow_all_prereleases: bool = False,\n specifier: Optional[specifiers.BaseSpecifier] = None,\n hashes: Optional[Hashes] = None,\n ) -> "CandidateEvaluator":\n """Create a CandidateEvaluator object.\n\n :param target_python: The target Python interpreter to use when\n checking compatibility. If None (the default), a TargetPython\n object will be constructed from the running Python.\n :param specifier: An optional object implementing `filter`\n (e.g. 
`packaging.specifiers.SpecifierSet`) to filter applicable\n versions.\n :param hashes: An optional collection of allowed hashes.\n """\n if target_python is None:\n target_python = TargetPython()\n if specifier is None:\n specifier = specifiers.SpecifierSet()\n\n supported_tags = target_python.get_sorted_tags()\n\n return cls(\n project_name=project_name,\n supported_tags=supported_tags,\n specifier=specifier,\n prefer_binary=prefer_binary,\n allow_all_prereleases=allow_all_prereleases,\n hashes=hashes,\n )\n\n def __init__(\n self,\n project_name: str,\n supported_tags: List[Tag],\n specifier: specifiers.BaseSpecifier,\n prefer_binary: bool = False,\n allow_all_prereleases: bool = False,\n hashes: Optional[Hashes] = None,\n ) -> None:\n """\n :param supported_tags: The PEP 425 tags supported by the target\n Python in order of preference (most preferred first).\n """\n self._allow_all_prereleases = allow_all_prereleases\n self._hashes = hashes\n self._prefer_binary = prefer_binary\n self._project_name = project_name\n self._specifier = specifier\n self._supported_tags = supported_tags\n # Since the index of the tag in the _supported_tags list is used\n # as a priority, precompute a map from tag to index/priority to be\n # used in wheel.find_most_preferred_tag.\n self._wheel_tag_preferences = {\n tag: idx for idx, tag in enumerate(supported_tags)\n }\n\n def get_applicable_candidates(\n self,\n candidates: List[InstallationCandidate],\n ) -> List[InstallationCandidate]:\n """\n Return the applicable candidates from a list of candidates.\n """\n # Using None infers from the specifier instead.\n allow_prereleases = self._allow_all_prereleases or None\n specifier = self._specifier\n\n # We turn the version object into a str here because otherwise\n # when we're debundled but setuptools isn't, Python will see\n # packaging.version.Version and\n # pkg_resources._vendor.packaging.version.Version as different\n # types. 
This way we'll use a str as a common data interchange\n # format. If we stop using the pkg_resources provided specifier\n # and start using our own, we can drop the cast to str().\n candidates_and_versions = [(c, str(c.version)) for c in candidates]\n versions = set(\n specifier.filter(\n (v for _, v in candidates_and_versions),\n prereleases=allow_prereleases,\n )\n )\n\n applicable_candidates = [c for c, v in candidates_and_versions if v in versions]\n filtered_applicable_candidates = filter_unallowed_hashes(\n candidates=applicable_candidates,\n hashes=self._hashes,\n project_name=self._project_name,\n )\n\n return sorted(filtered_applicable_candidates, key=self._sort_key)\n\n def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey:\n """\n Function to pass as the `key` argument to a call to sorted() to sort\n InstallationCandidates by preference.\n\n Returns a tuple such that tuples sorting as greater using Python's\n default comparison operator are more preferred.\n\n The preference is as follows:\n\n First and foremost, candidates with allowed (matching) hashes are\n always preferred over candidates without matching hashes. This is\n because e.g. if the only candidate with an allowed hash is yanked,\n we still want to use that candidate.\n\n Second, excepting hash considerations, candidates that have been\n yanked (in the sense of PEP 592) are always less preferred than\n candidates that haven't been yanked. Then:\n\n If not finding wheels, they are sorted by version only.\n If finding wheels, then the sort order is by version, then:\n 1. existing installs\n 2. wheels ordered via Wheel.support_index_min(self._supported_tags)\n 3. 
source archives\n If prefer_binary was set, then all wheels are sorted above sources.\n\n Note: it was considered to embed this logic into the Link\n comparison operators, but then different sdist links\n with the same version, would have to be considered equal\n """\n valid_tags = self._supported_tags\n support_num = len(valid_tags)\n build_tag: BuildTag = ()\n binary_preference = 0\n link = candidate.link\n if link.is_wheel:\n # can raise InvalidWheelFilename\n wheel = Wheel(link.filename)\n try:\n pri = -(\n wheel.find_most_preferred_tag(\n valid_tags, self._wheel_tag_preferences\n )\n )\n except ValueError:\n raise UnsupportedWheel(\n f"{wheel.filename} is not a supported wheel for this platform. It "\n "can't be sorted."\n )\n if self._prefer_binary:\n binary_preference = 1\n build_tag = wheel.build_tag\n else: # sdist\n pri = -(support_num)\n has_allowed_hash = int(link.is_hash_allowed(self._hashes))\n yank_value = -1 * int(link.is_yanked) # -1 for yanked.\n return (\n has_allowed_hash,\n yank_value,\n binary_preference,\n candidate.version,\n pri,\n build_tag,\n )\n\n def sort_best_candidate(\n self,\n candidates: List[InstallationCandidate],\n ) -> Optional[InstallationCandidate]:\n """\n Return the best candidate per the instance's sort order, or None if\n no candidate is acceptable.\n """\n if not candidates:\n return None\n best_candidate = max(candidates, key=self._sort_key)\n return best_candidate\n\n def compute_best_candidate(\n self,\n candidates: List[InstallationCandidate],\n ) -> BestCandidateResult:\n """\n Compute and return a `BestCandidateResult` instance.\n """\n applicable_candidates = self.get_applicable_candidates(candidates)\n\n best_candidate = self.sort_best_candidate(applicable_candidates)\n\n return BestCandidateResult(\n candidates,\n applicable_candidates=applicable_candidates,\n best_candidate=best_candidate,\n )\n\n\nclass PackageFinder:\n """This finds packages.\n\n This is meant to match easy_install's technique for looking 
for\n packages, by reading pages and looking for appropriate links.\n """\n\n def __init__(\n self,\n link_collector: LinkCollector,\n target_python: TargetPython,\n allow_yanked: bool,\n format_control: Optional[FormatControl] = None,\n candidate_prefs: Optional[CandidatePreferences] = None,\n ignore_requires_python: Optional[bool] = None,\n ) -> None:\n """\n This constructor is primarily meant to be used by the create() class\n method and from tests.\n\n :param format_control: A FormatControl object, used to control\n the selection of source packages / binary packages when consulting\n the index and links.\n :param candidate_prefs: Options to use when creating a\n CandidateEvaluator object.\n """\n if candidate_prefs is None:\n candidate_prefs = CandidatePreferences()\n\n format_control = format_control or FormatControl(set(), set())\n\n self._allow_yanked = allow_yanked\n self._candidate_prefs = candidate_prefs\n self._ignore_requires_python = ignore_requires_python\n self._link_collector = link_collector\n self._target_python = target_python\n\n self.format_control = format_control\n\n # These are boring links that have already been logged somehow.\n self._logged_links: Set[Tuple[Link, LinkType, str]] = set()\n\n # Cache of the result of finding candidates\n self._all_candidates: Dict[str, List[InstallationCandidate]] = {}\n self._best_candidates: Dict[\n Tuple[str, Optional[specifiers.BaseSpecifier], Optional[Hashes]],\n BestCandidateResult,\n ] = {}\n\n # Don't include an allow_yanked default value to make sure each call\n # site considers whether yanked releases are allowed. 
This also causes\n # that decision to be made explicit in the calling code, which helps\n # people when reading the code.\n @classmethod\n def create(\n cls,\n link_collector: LinkCollector,\n selection_prefs: SelectionPreferences,\n target_python: Optional[TargetPython] = None,\n ) -> "PackageFinder":\n """Create a PackageFinder.\n\n :param selection_prefs: The candidate selection preferences, as a\n SelectionPreferences object.\n :param target_python: The target Python interpreter to use when\n checking compatibility. If None (the default), a TargetPython\n object will be constructed from the running Python.\n """\n if target_python is None:\n target_python = TargetPython()\n\n candidate_prefs = CandidatePreferences(\n prefer_binary=selection_prefs.prefer_binary,\n allow_all_prereleases=selection_prefs.allow_all_prereleases,\n )\n\n return cls(\n candidate_prefs=candidate_prefs,\n link_collector=link_collector,\n target_python=target_python,\n allow_yanked=selection_prefs.allow_yanked,\n format_control=selection_prefs.format_control,\n ignore_requires_python=selection_prefs.ignore_requires_python,\n )\n\n @property\n def target_python(self) -> TargetPython:\n return self._target_python\n\n @property\n def search_scope(self) -> SearchScope:\n return self._link_collector.search_scope\n\n @search_scope.setter\n def search_scope(self, search_scope: SearchScope) -> None:\n self._link_collector.search_scope = search_scope\n\n @property\n def find_links(self) -> List[str]:\n return self._link_collector.find_links\n\n @property\n def index_urls(self) -> List[str]:\n return self.search_scope.index_urls\n\n @property\n def proxy(self) -> Optional[str]:\n return self._link_collector.session.pip_proxy\n\n @property\n def trusted_hosts(self) -> Iterable[str]:\n for host_port in self._link_collector.session.pip_trusted_origins:\n yield build_netloc(*host_port)\n\n @property\n def custom_cert(self) -> Optional[str]:\n # session.verify is either a boolean (use default bundle/no 
SSL
        # verification) or a string path to a custom CA bundle to use. We only
        # care about the latter.
        verify = self._link_collector.session.verify
        return verify if isinstance(verify, str) else None

    @property
    def client_cert(self) -> Optional[str]:
        # Path to a PEM-format client certificate, if one was configured on
        # the session.  pip never sets the (cert, key) tuple form, hence the
        # assert below.
        cert = self._link_collector.session.cert
        assert not isinstance(cert, tuple), "pip only supports PEM client certs"
        return cert

    @property
    def allow_all_prereleases(self) -> bool:
        return self._candidate_prefs.allow_all_prereleases

    def set_allow_all_prereleases(self) -> None:
        self._candidate_prefs.allow_all_prereleases = True

    @property
    def prefer_binary(self) -> bool:
        return self._candidate_prefs.prefer_binary

    def set_prefer_binary(self) -> None:
        self._candidate_prefs.prefer_binary = True

    def requires_python_skipped_reasons(self) -> List[str]:
        """Return sorted, de-duplicated detail strings for links that were
        skipped because of a Requires-Python mismatch (collected in
        ``self._logged_links`` by ``_log_skipped_link``)."""
        reasons = {
            detail
            for _, result, detail in self._logged_links
            if result == LinkType.requires_python_mismatch
        }
        return sorted(reasons)

    def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
        """Build a LinkEvaluator for *project_name* using this finder's
        format control, target Python, and yanked/Requires-Python policy."""
        canonical_name = canonicalize_name(project_name)
        formats = self.format_control.get_allowed_formats(canonical_name)

        return LinkEvaluator(
            project_name=project_name,
            canonical_name=canonical_name,
            formats=formats,
            target_python=self._target_python,
            allow_yanked=self._allow_yanked,
            ignore_requires_python=self._ignore_requires_python,
        )

    def _sort_links(self, links: Iterable[Link]) -> List[Link]:
        """
        Returns elements of links in order, non-egg links first, egg links
        second, while eliminating duplicates
        """
        # Egg links are deprioritized: they are appended after all non-egg
        # links, but relative order within each group is preserved.
        eggs, no_eggs = [], []
        seen: Set[Link] = set()
        for link in links:
            if link not in seen:
                seen.add(link)
                if link.egg_fragment:
                    eggs.append(link)
                else:
                    no_eggs.append(link)
        return no_eggs + eggs

    def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None:
        # Log each (link, result, detail) combination only once per finder.
        entry = (link, result, detail)
        if entry not in self._logged_links:
            # Put the link at the end so the reason is more visible and because
            # the link string is usually very long.
            logger.debug("Skipping link: %s: %s", detail, link)
            self._logged_links.add(entry)

    def get_install_candidate(
        self, link_evaluator: LinkEvaluator, link: Link
    ) -> Optional[InstallationCandidate]:
        """
        If the link is a candidate for install, convert it to an
        InstallationCandidate and return it. Otherwise, return None.
        """
        result, detail = link_evaluator.evaluate_link(link)
        if result != LinkType.candidate:
            self._log_skipped_link(link, result, detail)
            return None

        try:
            # For a candidate result, ``detail`` carries the version string.
            return InstallationCandidate(
                name=link_evaluator.project_name,
                link=link,
                version=detail,
            )
        except InvalidVersion:
            return None

    def evaluate_links(
        self, link_evaluator: LinkEvaluator, links: Iterable[Link]
    ) -> List[InstallationCandidate]:
        """
        Convert links that are candidates to InstallationCandidate objects.
        """
        candidates = []
        for link in self._sort_links(links):
            candidate = self.get_install_candidate(link_evaluator, link)
            if candidate is not None:
                candidates.append(candidate)

        return candidates

    def process_project_url(
        self, project_url: Link, link_evaluator: LinkEvaluator
    ) -> List[InstallationCandidate]:
        """Fetch one project page and return the candidates found on it.

        Returns an empty list when the page cannot be fetched.
        """
        logger.debug(
            "Fetching project page and analyzing links: %s",
            project_url,
        )
        index_response = self._link_collector.fetch_response(project_url)
        if index_response is None:
            return []

        page_links = list(parse_links(index_response))

        with indent_log():
            package_links = self.evaluate_links(
                link_evaluator,
                links=page_links,
            )

        return package_links

    def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
        """Find all available InstallationCandidate for project_name

        This checks index_urls and find_links.
        All versions found are returned as an InstallationCandidate list.

        See LinkEvaluator.evaluate_link() for details on which files
        are accepted.
        """
        # Results are memoized per project name for the finder's lifetime.
        if project_name in self._all_candidates:
            return self._all_candidates[project_name]

        link_evaluator = self.make_link_evaluator(project_name)

        collected_sources = self._link_collector.collect_sources(
            project_name=project_name,
            candidates_from_page=functools.partial(
                self.process_project_url,
                link_evaluator=link_evaluator,
            ),
        )

        page_candidates_it = itertools.chain.from_iterable(
            source.page_candidates()
            for sources in collected_sources
            for source in sources
            if source is not None
        )
        page_candidates = list(page_candidates_it)

        file_links_it = itertools.chain.from_iterable(
            source.file_links()
            for sources in collected_sources
            for source in sources
            if source is not None
        )
        file_candidates = self.evaluate_links(
            link_evaluator,
            sorted(file_links_it, reverse=True),
        )

        if logger.isEnabledFor(logging.DEBUG) and file_candidates:
            paths = []
            for candidate in file_candidates:
                assert candidate.link.url  # we need to have a URL
                try:
                    paths.append(candidate.link.file_path)
                except Exception:
                    paths.append(candidate.link.url)  # it's not a local file

            logger.debug("Local files found: %s", ", ".join(paths))

        # This is an intentional priority ordering
        self._all_candidates[project_name] = file_candidates + page_candidates

        return self._all_candidates[project_name]

    def make_candidate_evaluator(
        self,
        project_name: str,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> CandidateEvaluator:
        """Create a CandidateEvaluator object to use."""
        candidate_prefs = self._candidate_prefs
        return CandidateEvaluator.create(
            project_name=project_name,
            target_python=self._target_python,
            prefer_binary=candidate_prefs.prefer_binary,
            allow_all_prereleases=candidate_prefs.allow_all_prereleases,
            specifier=specifier,
            hashes=hashes,
        )

    def find_best_candidate(
        self,
        project_name: str,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> BestCandidateResult:
        """Find matches for the given project and specifier.

        :param specifier: An optional object implementing `filter`
            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
            versions.

        :return: A `BestCandidateResult` instance.
        """
        # Memoized on the full (project_name, specifier, hashes) key, since
        # different specifiers/hashes can select different best candidates.
        if (project_name, specifier, hashes) in self._best_candidates:
            return self._best_candidates[project_name, specifier, hashes]

        candidates = self.find_all_candidates(project_name)
        candidate_evaluator = self.make_candidate_evaluator(
            project_name=project_name,
            specifier=specifier,
            hashes=hashes,
        )
        self._best_candidates[project_name, specifier, hashes] = (
            candidate_evaluator.compute_best_candidate(candidates)
        )

        return self._best_candidates[project_name, specifier, hashes]

    def find_requirement(
        self, req: InstallRequirement, upgrade: bool
    ) -> Optional[InstallationCandidate]:
        """Try to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a InstallationCandidate if found,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        """
        name = req.name
        assert name is not None, "find_requirement() called with no name"

        hashes = req.hashes(trust_internet=False)
        best_candidate_result = self.find_best_candidate(
            name,
            specifier=req.specifier,
            hashes=hashes,
        )
        best_candidate = best_candidate_result.best_candidate

        installed_version: Optional[_BaseVersion] = None
        if req.satisfied_by is not None:
            installed_version = req.satisfied_by.version

        def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
            # This repeated parse_version and str() conversion is needed to
            # handle different vendoring sources from pip and pkg_resources.
            # If we stop using the pkg_resources provided specifier and start
            # using our own, we can drop the cast to str().
            return (
                ", ".join(
                    sorted(
                        {str(c.version) for c in cand_iter},
                        key=parse_version,
                    )
                )
                or "none"
            )

        if installed_version is None and best_candidate is None:
            logger.critical(
                "Could not find a version that satisfies the requirement %s "
                "(from versions: %s)",
                req,
                _format_versions(best_candidate_result.all_candidates),
            )

            raise DistributionNotFound(f"No matching distribution found for {req}")

        def _should_install_candidate(
            candidate: Optional[InstallationCandidate],
        ) -> "TypeGuard[InstallationCandidate]":
            # True when the found candidate is strictly newer than what is
            # already installed (or nothing is installed at all).
            if installed_version is None:
                return True
            if best_candidate is None:
                return False
            return best_candidate.version > installed_version

        if not upgrade and installed_version is not None:
            if _should_install_candidate(best_candidate):
                logger.debug(
                    "Existing installed version (%s) satisfies requirement "
                    "(most up-to-date version is %s)",
                    installed_version,
                    best_candidate.version,
                )
            else:
                logger.debug(
                    "Existing installed version (%s) is most up-to-date and "
                    "satisfies requirement",
                    installed_version,
                )
            return None

        if _should_install_candidate(best_candidate):
            logger.debug(
                "Using version %s (newest of versions: %s)",
                best_candidate.version,
                _format_versions(best_candidate_result.applicable_candidates),
            )
            return best_candidate

        # We have an existing version, and its the best version
        logger.debug(
            "Installed version (%s) is most up-to-date (past versions: %s)",
            installed_version,
            _format_versions(best_candidate_result.applicable_candidates),
        )
        raise BestVersionAlreadyInstalled


def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
    """Find the separator's index based on the package's canonical name.

    :param fragment: A <package>+<version> filename "fragment" (stem) or
        egg fragment.
    :param canonical_name: The package's canonical name.

    This function is needed since the canonicalized name does not necessarily
    have the same length as the egg info's name part. An example::

    >>> fragment = 'foo__bar-1.0'
    >>> canonical_name = 'foo-bar'
    >>> _find_name_version_sep(fragment, canonical_name)
    8
    """
    # Project name and version must be separated by one single dash. Find all
    # occurrences of dashes; if the string in front of it matches the canonical
    # name, this is the one separating the name and version parts.
    for i, c in enumerate(fragment):
        if c != "-":
            continue
        if canonicalize_name(fragment[:i]) == canonical_name:
            return i
    raise ValueError(f"{fragment} does not match {canonical_name}")


def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]:
    """Parse the version string from a <package>+<version> filename
    "fragment" (stem) or egg fragment.

    :param fragment: The string to parse. E.g. foo-2.1
    :param canonical_name: The canonicalized name of the package this
        belongs to.

    Returns ``None`` when no name/version separator can be found, or when
    nothing follows it.
    """
    try:
        version_start = _find_name_version_sep(fragment, canonical_name) + 1
    except ValueError:
        return None
    version = fragment[version_start:]
    if not version:
        return None
    return version
.venv\Lib\site-packages\pip\_internal\index\package_finder.py
package_finder.py
Python
38,446
0.95
0.157143
0.046667
node-utils
396
2024-10-11T13:12:59.502334
BSD-3-Clause
false
7c156f2acd037a9ca6589c8ccb1ce36f
import logging
import mimetypes
import os
from collections import defaultdict
from typing import Callable, Dict, Iterable, List, Optional, Tuple

from pip._vendor.packaging.utils import (
    InvalidSdistFilename,
    InvalidWheelFilename,
    canonicalize_name,
    parse_sdist_filename,
    parse_wheel_filename,
)

from pip._internal.models.candidate import InstallationCandidate
from pip._internal.models.link import Link
from pip._internal.utils.urls import path_to_url, url_to_path
from pip._internal.vcs import is_url

logger = logging.getLogger(__name__)

# Type aliases used throughout this module.
FoundCandidates = Iterable[InstallationCandidate]
FoundLinks = Iterable[Link]
CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
PageValidator = Callable[[Link], bool]


class LinkSource:
    """Abstract base for one ``--find-links`` / ``--index-url`` location."""

    @property
    def link(self) -> Optional[Link]:
        """Returns the underlying link, if there's one."""
        raise NotImplementedError()

    def page_candidates(self) -> FoundCandidates:
        """Candidates found by parsing an archive listing HTML file."""
        raise NotImplementedError()

    def file_links(self) -> FoundLinks:
        """Links found by specifying archives directly."""
        raise NotImplementedError()


def _is_html_file(file_url: str) -> bool:
    # Classification is based purely on the URL's guessed MIME type.
    return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"


class _FlatDirectoryToUrls:
    """Scans directory and caches results"""

    def __init__(self, path: str) -> None:
        self._path = path
        self._page_candidates: List[str] = []
        self._project_name_to_urls: Dict[str, List[str]] = defaultdict(list)
        self._scanned_directory = False

    def _scan_directory(self) -> None:
        """Scans directory once and populates both page_candidates
        and project_name_to_urls at the same time
        """
        for entry in os.scandir(self._path):
            url = path_to_url(entry.path)
            if _is_html_file(url):
                self._page_candidates.append(url)
                continue

            # File must have a valid wheel or sdist name,
            # otherwise not worth considering as a package
            try:
                project_filename = parse_wheel_filename(entry.name)[0]
            except InvalidWheelFilename:
                try:
                    project_filename = parse_sdist_filename(entry.name)[0]
                except InvalidSdistFilename:
                    continue

            self._project_name_to_urls[project_filename].append(url)
        self._scanned_directory = True

    @property
    def page_candidates(self) -> List[str]:
        # Lazily scan on first access; subsequent calls reuse the cache.
        if not self._scanned_directory:
            self._scan_directory()

        return self._page_candidates

    @property
    def project_name_to_urls(self) -> Dict[str, List[str]]:
        # Lazily scan on first access; subsequent calls reuse the cache.
        if not self._scanned_directory:
            self._scan_directory()

        return self._project_name_to_urls


class _FlatDirectorySource(LinkSource):
    """Link source specified by ``--find-links=<path-to-dir>``.

    This looks the content of the directory, and returns:

    * ``page_candidates``: Links listed on each HTML file in the directory.
    * ``file_candidates``: Archives in the directory.
    """

    # Class-level cache: directory scans are shared across all instances
    # (i.e. across projects) for the lifetime of the process.
    _paths_to_urls: Dict[str, _FlatDirectoryToUrls] = {}

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        path: str,
        project_name: str,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._project_name = canonicalize_name(project_name)

        # Get existing instance of _FlatDirectoryToUrls if it exists
        if path in self._paths_to_urls:
            self._path_to_urls = self._paths_to_urls[path]
        else:
            self._path_to_urls = _FlatDirectoryToUrls(path=path)
            self._paths_to_urls[path] = self._path_to_urls

    @property
    def link(self) -> Optional[Link]:
        return None

    def page_candidates(self) -> FoundCandidates:
        for url in self._path_to_urls.page_candidates:
            yield from self._candidates_from_page(Link(url))

    def file_links(self) -> FoundLinks:
        for url in self._path_to_urls.project_name_to_urls[self._project_name]:
            yield Link(url)


class _LocalFileSource(LinkSource):
    """``--find-links=<path-or-url>`` or ``--[extra-]index-url=<path-or-url>``.

    If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
    the option, it is converted to a URL first. This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # HTML files yield page candidates; anything else yields file links.
        if not _is_html_file(self._link.url):
            return
        yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        if _is_html_file(self._link.url):
            return
        yield self._link


class _RemoteFileSource(LinkSource):
    """``--find-links=<url>`` or ``--[extra-]index-url=<url>``.

    This returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        page_validator: PageValidator,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._page_validator = page_validator
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # The validator decides whether this link may be fetched as a page.
        if not self._page_validator(self._link):
            return
        yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        yield self._link


class _IndexDirectorySource(LinkSource):
    """``--[extra-]index-url=<path-to-directory>``.

    This is treated like a remote URL; ``candidates_from_page`` contains logic
    for this by appending ``index.html`` to the link.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # An index directory never contributes direct file links.
        return ()


def build_source(
    location: str,
    *,
    candidates_from_page: CandidatesFromPage,
    page_validator: PageValidator,
    expand_dir: bool,
    cache_link_parsing: bool,
    project_name: str,
) -> Tuple[Optional[str], Optional[LinkSource]]:
    """Classify *location* and build the matching :class:`LinkSource`.

    Returns ``(url, source)``; either element may be ``None`` when the
    location is unusable (a warning is logged in that case).
    """
    path: Optional[str] = None
    url: Optional[str] = None
    if os.path.exists(location):  # Is a local path.
        url = path_to_url(location)
        path = location
    elif location.startswith("file:"):  # A file: URL.
        url = location
        path = url_to_path(location)
    elif is_url(location):
        url = location

    if url is None:
        msg = (
            "Location '%s' is ignored: "
            "it is either a non-existing path or lacks a specific scheme."
        )
        logger.warning(msg, location)
        return (None, None)

    if path is None:
        source: LinkSource = _RemoteFileSource(
            candidates_from_page=candidates_from_page,
            page_validator=page_validator,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, source)

    if os.path.isdir(path):
        # ``expand_dir`` distinguishes --find-links dirs (scan contents)
        # from index dirs (treat as a page location).
        if expand_dir:
            source = _FlatDirectorySource(
                candidates_from_page=candidates_from_page,
                path=path,
                project_name=project_name,
            )
        else:
            source = _IndexDirectorySource(
                candidates_from_page=candidates_from_page,
                link=Link(url, cache_link_parsing=cache_link_parsing),
            )
        return (url, source)
    elif os.path.isfile(path):
        source = _LocalFileSource(
            candidates_from_page=candidates_from_page,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, source)
    logger.warning(
        "Location '%s' is ignored: it is neither a file nor a directory.",
        location,
    )
    return (url, None)
.venv\Lib\site-packages\pip\_internal\index\sources.py
sources.py
Python
8,632
0.95
0.179577
0.044248
awesome-app
55
2024-08-03T18:08:58.736434
Apache-2.0
false
e141bacbe7819b7ab6ffb2de697a9148
"""Index interaction code"""
# Subpackage home of the package finder and link-source machinery
# (see package_finder.py and sources.py in this package).
.venv\Lib\site-packages\pip\_internal\index\__init__.py
__init__.py
Python
29
0.5
0
0
awesome-app
765
2023-12-19T04:36:10.233141
BSD-3-Clause
false
ea3e3b9d6288464f5b3d66603e087646
\n\n
.venv\Lib\site-packages\pip\_internal\index\__pycache__\collector.cpython-313.pyc
collector.cpython-313.pyc
Other
21,843
0.95
0.066964
0.004717
python-kit
164
2024-05-01T04:03:00.325097
BSD-3-Clause
false
75a59c2ac543be5573bd6d0ce765bb6d
\n\n
.venv\Lib\site-packages\pip\_internal\index\__pycache__\package_finder.cpython-313.pyc
package_finder.cpython-313.pyc
Other
40,949
0.95
0.069159
0.004024
awesome-app
533
2024-01-06T13:37:49.893640
BSD-3-Clause
false
7ee93a7a0ad55b44bad71dd6ad69d7cd
\n\n
.venv\Lib\site-packages\pip\_internal\index\__pycache__\sources.cpython-313.pyc
sources.cpython-313.pyc
Other
12,755
0.8
0.017857
0.078431
react-lib
136
2024-03-06T09:46:38.566171
BSD-3-Clause
false
42b9a6dff64c975c24bf502895dae1a5
\n\n
.venv\Lib\site-packages\pip\_internal\index\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
231
0.7
0
0
awesome-app
809
2023-07-23T10:46:27.289980
Apache-2.0
false
603b90457cb4224e836014c2aae3d223
import functools
import os
import site
import sys
import sysconfig
import typing

from pip._internal.exceptions import InstallationError
from pip._internal.utils import appdirs
from pip._internal.utils.virtualenv import running_under_virtualenv

# Application Directories
USER_CACHE_DIR = appdirs.user_cache_dir("pip")

# FIXME doesn't account for venv linked to global site-packages
site_packages: str = sysconfig.get_path("purelib")


def get_major_minor_version() -> str:
    """
    Return the major-minor version of the current Python as a string, e.g.
    "3.7" or "3.10".
    """
    return "{}.{}".format(*sys.version_info)


def change_root(new_root: str, pathname: str) -> str:
    """Return 'pathname' with 'new_root' prepended.

    If 'pathname' is relative, this is equivalent to os.path.join(new_root, pathname).
    Otherwise, it requires making 'pathname' relative and then joining the
    two, which is tricky on DOS/Windows and Mac OS.

    This is borrowed from Python's standard library's distutils module.

    :raises InstallationError: on platforms other than posix and nt.
    """
    if os.name == "posix":
        if not os.path.isabs(pathname):
            return os.path.join(new_root, pathname)
        else:
            # Strip the leading "/" so join doesn't discard new_root.
            return os.path.join(new_root, pathname[1:])

    elif os.name == "nt":
        (drive, path) = os.path.splitdrive(pathname)
        if path[0] == "\\":
            path = path[1:]
        return os.path.join(new_root, path)

    else:
        raise InstallationError(
            f"Unknown platform: {os.name}\n"
            "Can not change root path prefix on unknown platform."
        )


def get_src_prefix() -> str:
    """Return the directory used for ``pip install -e`` source checkouts."""
    if running_under_virtualenv():
        src_prefix = os.path.join(sys.prefix, "src")
    else:
        # FIXME: keep src in cwd for now (it is not a temporary folder)
        try:
            src_prefix = os.path.join(os.getcwd(), "src")
        except OSError:
            # In case the current working directory has been renamed or deleted
            sys.exit("The folder you are executing pip from can no longer be found.")

    # under macOS + virtualenv sys.prefix is not properly resolved
    # it is something like /path/to/python/bin/..
    return os.path.abspath(src_prefix)


try:
    # Use getusersitepackages if this is present, as it ensures that the
    # value is initialised properly.
    user_site: typing.Optional[str] = site.getusersitepackages()
except AttributeError:
    user_site = site.USER_SITE


@functools.lru_cache(maxsize=None)
def is_osx_framework() -> bool:
    """True when this Python is a macOS framework build (cached)."""
    return bool(sysconfig.get_config_var("PYTHONFRAMEWORK"))
.venv\Lib\site-packages\pip\_internal\locations\base.py
base.py
Python
2,556
0.95
0.160494
0.126984
react-lib
739
2025-05-09T19:26:28.092732
MIT
false
df3959adc2db3eb93e958438ad137a98
"""Locations where we look for configs, install stuff, etc"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

# If pip's going to use distutils, it should not be using the copy that setuptools
# might have injected into the environment. This is done by removing the injected
# shim, if it's injected.
#
# See https://github.com/pypa/pip/issues/8761 for the original discussion and
# rationale for why this is done within pip.
try:
    __import__("_distutils_hack").remove_shim()
except (ImportError, AttributeError):
    pass

import logging
import os
import sys
from distutils.cmd import Command as DistutilsCommand
from distutils.command.install import SCHEME_KEYS
from distutils.command.install import install as distutils_install_command
from distutils.sysconfig import get_python_lib
from typing import Dict, List, Optional, Union

from pip._internal.models.scheme import Scheme
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.virtualenv import running_under_virtualenv

from .base import get_major_minor_version

logger = logging.getLogger(__name__)


def distutils_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
    *,
    ignore_config_files: bool = False,
) -> Dict[str, str]:
    """
    Return a distutils install scheme

    :param dist_name: package name, used for the ``headers`` path.
    :param user/home/root/prefix: mutually-constrained scheme selectors
        (``user`` excludes ``prefix``; ``home`` excludes ``prefix``).
    :param isolated: pass ``--no-user-cfg`` so per-user config is skipped.
    :param ignore_config_files: skip parsing distutils config files entirely.
    """
    from distutils.dist import Distribution

    dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name}
    if isolated:
        dist_args["script_args"] = ["--no-user-cfg"]

    d = Distribution(dist_args)
    if not ignore_config_files:
        try:
            d.parse_config_files()
        except UnicodeDecodeError:
            # Unreadable config files are skipped with a warning rather
            # than aborting the whole scheme computation.
            paths = d.find_config_files()
            logger.warning(
                "Ignore distutils configs in %s due to encoding errors.",
                ", ".join(os.path.basename(p) for p in paths),
            )
    obj: Optional[DistutilsCommand] = None
    obj = d.get_command_obj("install", create=True)
    assert obj is not None
    i: distutils_install_command = obj
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), f"user={user} prefix={prefix}"
    assert not (home and prefix), f"home={home} prefix={prefix}"
    i.user = user or i.user
    if user or home:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()

    scheme: Dict[str, str] = {}
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, "install_" + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib). Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if "install_lib" in d.get_option_dict("install"):
        scheme.update({"purelib": i.install_lib, "platlib": i.install_lib})

    if running_under_virtualenv():
        # pip historically uses a special headers location in virtualenvs:
        # <prefix>/include/site/pythonX.Y/<dist_name>.
        if home:
            prefix = home
        elif user:
            prefix = i.install_userbase
        else:
            prefix = i.prefix
        scheme["headers"] = os.path.join(
            prefix,
            "include",
            "site",
            f"python{get_major_minor_version()}",
            dist_name,
        )

    if root is not None:
        path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
        scheme["headers"] = os.path.join(root, path_no_drive[1:])

    return scheme


def get_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> Scheme:
    """
    Get the "scheme" corresponding to the input parameters. The distutils
    documentation provides the context for the available schemes:
    https://docs.python.org/3/install/index.html#alternate-installation

    :param dist_name: the name of the package to retrieve the scheme for, used
        in the headers scheme path
    :param user: indicates to use the "user" scheme
    :param home: indicates to use the "home" scheme and provides the base
        directory for the same
    :param root: root under which other directories are re-based
    :param isolated: equivalent to --no-user-cfg, i.e. do not consider
        ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
        scheme paths
    :param prefix: indicates to use the "prefix" scheme and provides the
        base directory for the same
    """
    scheme = distutils_scheme(dist_name, user, home, root, isolated, prefix)
    return Scheme(
        platlib=scheme["platlib"],
        purelib=scheme["purelib"],
        headers=scheme["headers"],
        scripts=scheme["scripts"],
        data=scheme["data"],
    )


def get_bin_prefix() -> str:
    """Return the directory where console scripts are installed."""
    # XXX: In old virtualenv versions, sys.prefix can contain '..' components,
    # so we need to call normpath to eliminate them.
    prefix = os.path.normpath(sys.prefix)
    if WINDOWS:
        bin_py = os.path.join(prefix, "Scripts")
        # buildout uses 'bin' on Windows too?
        if not os.path.exists(bin_py):
            bin_py = os.path.join(prefix, "bin")
        return bin_py
    # Forcing to use /usr/local/bin for standard macOS framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/":
        return "/usr/local/bin"
    return os.path.join(prefix, "bin")


def get_purelib() -> str:
    """Pure-Python site-packages directory per distutils."""
    return get_python_lib(plat_specific=False)


def get_platlib() -> str:
    """Platform-specific site-packages directory per distutils."""
    return get_python_lib(plat_specific=True)
.venv\Lib\site-packages\pip\_internal\locations\_distutils.py
_distutils.py
Python
6,013
0.95
0.19186
0.148649
node-utils
997
2024-11-04T16:11:17.123216
MIT
false
7d77239f739c7c5363ff3f387e1b09ca
import logging
import os
import sys
import sysconfig
import typing

from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
from pip._internal.utils.virtualenv import running_under_virtualenv

from .base import change_root, get_major_minor_version, is_osx_framework

logger = logging.getLogger(__name__)


# Notes on _infer_* functions.
# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no
# way to ask things like "what is the '_prefix' scheme on this platform". These
# functions try to answer that with some heuristics while accounting for ad-hoc
# platforms not covered by CPython's default sysconfig implementation. If the
# ad-hoc implementation does not fully implement sysconfig, we'll fall back to
# a POSIX scheme.

_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())

# sysconfig.get_preferred_scheme exists on 3.10+; None on older Pythons.
_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None)


def _should_use_osx_framework_prefix() -> bool:
    """Check for Apple's ``osx_framework_library`` scheme.

    Python distributed by Apple's Command Line Tools has this special scheme
    that's used when:

    * This is a framework build.
    * We are installing into the system prefix.

    This does not account for ``pip install --prefix`` (also means we're not
    installing to the system prefix), which should use ``posix_prefix``, but
    logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But
    since ``prefix`` is not available for ``sysconfig.get_default_scheme()``,
    which is the stdlib replacement for ``_infer_prefix()``, presumably Apple
    wouldn't be able to magically switch between ``osx_framework_library`` and
    ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library``
    means its behavior is consistent whether we use the stdlib implementation
    or our own, and we deal with this special case in ``get_scheme()`` instead.
    """
    return (
        "osx_framework_library" in _AVAILABLE_SCHEMES
        and not running_under_virtualenv()
        and is_osx_framework()
    )


def _infer_prefix() -> str:
    """Try to find a prefix scheme for the current platform.

    This tries:

    * A special ``osx_framework_library`` for Python distributed by Apple's
      Command Line Tools, when not running in a virtual environment.
    * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
    * Implementation without OS, used by PyPy on POSIX (``pypy``).
    * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
    * Just the OS name, used by CPython on Windows (``nt``).

    If none of the above works, fall back to ``posix_prefix``.
    """
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("prefix")
    if _should_use_osx_framework_prefix():
        return "osx_framework_library"
    implementation_suffixed = f"{sys.implementation.name}_{os.name}"
    if implementation_suffixed in _AVAILABLE_SCHEMES:
        return implementation_suffixed
    if sys.implementation.name in _AVAILABLE_SCHEMES:
        return sys.implementation.name
    suffixed = f"{os.name}_prefix"
    if suffixed in _AVAILABLE_SCHEMES:
        return suffixed
    if os.name in _AVAILABLE_SCHEMES:  # On Windows, prefix is just called "nt".
        return os.name
    return "posix_prefix"


def _infer_user() -> str:
    """Try to find a user scheme for the current platform.

    :raises UserInstallationInvalid: when no user scheme exists at all.
    """
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("user")
    if is_osx_framework() and not running_under_virtualenv():
        suffixed = "osx_framework_user"
    else:
        suffixed = f"{os.name}_user"
    if suffixed in _AVAILABLE_SCHEMES:
        return suffixed
    if "posix_user" not in _AVAILABLE_SCHEMES:  # User scheme unavailable.
        raise UserInstallationInvalid()
    return "posix_user"


def _infer_home() -> str:
    """Try to find a home for the current platform."""
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("home")
    suffixed = f"{os.name}_home"
    if suffixed in _AVAILABLE_SCHEMES:
        return suffixed
    return "posix_home"


# Update these keys if the user sets a custom home.
_HOME_KEYS = [
    "installed_base",
    "base",
    "installed_platbase",
    "platbase",
    "prefix",
    "exec_prefix",
]
if sysconfig.get_config_var("userbase") is not None:
    _HOME_KEYS.append("userbase")


def get_scheme(
    dist_name: str,
    user: bool = False,
    home: typing.Optional[str] = None,
    root: typing.Optional[str] = None,
    isolated: bool = False,
    prefix: typing.Optional[str] = None,
) -> Scheme:
    """
    Get the "scheme" corresponding to the input parameters.

    :param dist_name: the name of the package to retrieve the scheme for, used
        in the headers scheme path
    :param user: indicates to use the "user" scheme
    :param home: indicates to use the "home" scheme
    :param root: root under which other directories are re-based
    :param isolated: ignored, but kept for distutils compatibility (where
        this controls whether the user-site pydistutils.cfg is honored)
    :param prefix: indicates to use the "prefix" scheme and provides the
        base directory for the same
    :raises InvalidSchemeCombination: if mutually exclusive selectors
        (``--user``/``--prefix`` or ``--home``/``--prefix``) are combined.
    """
    if user and prefix:
        raise InvalidSchemeCombination("--user", "--prefix")
    if home and prefix:
        raise InvalidSchemeCombination("--home", "--prefix")

    if home is not None:
        scheme_name = _infer_home()
    elif user:
        scheme_name = _infer_user()
    else:
        scheme_name = _infer_prefix()

    # Special case: When installing into a custom prefix, use posix_prefix
    # instead of osx_framework_library. See _should_use_osx_framework_prefix()
    # docstring for details.
    if prefix is not None and scheme_name == "osx_framework_library":
        scheme_name = "posix_prefix"

    if home is not None:
        variables = {k: home for k in _HOME_KEYS}
    elif prefix is not None:
        variables = {k: prefix for k in _HOME_KEYS}
    else:
        variables = {}

    paths = sysconfig.get_paths(scheme=scheme_name, vars=variables)

    # Logic here is very arbitrary, we're doing it for compatibility, don't ask.
    # 1. Pip historically uses a special header path in virtual environments.
    # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We
    #    only do the same when not running in a virtual environment because
    #    pip's historical header path logic (see point 1) did not do this.
    if running_under_virtualenv():
        if user:
            base = variables.get("userbase", sys.prefix)
        else:
            base = variables.get("base", sys.prefix)
        python_xy = f"python{get_major_minor_version()}"
        paths["include"] = os.path.join(base, "include", "site", python_xy)
    elif not dist_name:
        dist_name = "UNKNOWN"

    scheme = Scheme(
        platlib=paths["platlib"],
        purelib=paths["purelib"],
        headers=os.path.join(paths["include"], dist_name),
        scripts=paths["scripts"],
        data=paths["data"],
    )
    if root is not None:
        # Re-base every scheme path under the requested root.
        converted_keys = {}
        for key in SCHEME_KEYS:
            converted_keys[key] = change_root(root, getattr(scheme, key))
        scheme = Scheme(**converted_keys)
    return scheme


def get_bin_prefix() -> str:
    """Return the scripts directory for the current interpreter."""
    # Forcing to use /usr/local/bin for standard macOS framework installs.
    if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/":
        return "/usr/local/bin"
    return sysconfig.get_paths()["scripts"]


def get_purelib() -> str:
    """Pure-Python site-packages directory per sysconfig."""
    return sysconfig.get_paths()["purelib"]


def get_platlib() -> str:
    """Platform-specific site-packages directory per sysconfig."""
    return sysconfig.get_paths()["platlib"]
.venv\Lib\site-packages\pip\_internal\locations\_sysconfig.py
_sysconfig.py
Python
7,724
0.95
0.242991
0.136364
awesome-app
132
2024-01-21T17:31:07.600477
GPL-3.0
false
241c82a4ab5a64b587b9e06d6d3467c9
import functools\nimport logging\nimport os\nimport pathlib\nimport sys\nimport sysconfig\nfrom typing import Any, Dict, Optional\n\nfrom pip._internal.models.scheme import SCHEME_KEYS, Scheme\nfrom pip._internal.utils.compat import WINDOWS\nfrom pip._internal.utils.deprecation import deprecated\nfrom pip._internal.utils.virtualenv import running_under_virtualenv\n\nfrom . import _sysconfig\nfrom .base import (\n USER_CACHE_DIR,\n get_major_minor_version,\n get_src_prefix,\n is_osx_framework,\n site_packages,\n user_site,\n)\n\n__all__ = [\n "USER_CACHE_DIR",\n "get_bin_prefix",\n "get_bin_user",\n "get_major_minor_version",\n "get_platlib",\n "get_purelib",\n "get_scheme",\n "get_src_prefix",\n "site_packages",\n "user_site",\n]\n\n\nlogger = logging.getLogger(__name__)\n\n\n_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")\n\n_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)\n\n\ndef _should_use_sysconfig() -> bool:\n """This function determines the value of _USE_SYSCONFIG.\n\n By default, pip uses sysconfig on Python 3.10+.\n But Python distributors can override this decision by setting:\n sysconfig._PIP_USE_SYSCONFIG = True / False\n Rationale in https://github.com/pypa/pip/issues/10647\n\n This is a function for testability, but should be constant during any one\n run.\n """\n return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))\n\n\n_USE_SYSCONFIG = _should_use_sysconfig()\n\nif not _USE_SYSCONFIG:\n # Import distutils lazily to avoid deprecation warnings,\n # but import it soon enough that it is in memory and available during\n # a pip reinstall.\n from . 
import _distutils\n\n# Be noisy about incompatibilities if this platforms "should" be using\n# sysconfig, but is explicitly opting out and using distutils instead.\nif _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:\n _MISMATCH_LEVEL = logging.WARNING\nelse:\n _MISMATCH_LEVEL = logging.DEBUG\n\n\ndef _looks_like_bpo_44860() -> bool:\n """The resolution to bpo-44860 will change this incorrect platlib.\n\n See <https://bugs.python.org/issue44860>.\n """\n from distutils.command.install import INSTALL_SCHEMES\n\n try:\n unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]\n except KeyError:\n return False\n return unix_user_platlib == "$usersite"\n\n\ndef _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:\n platlib = scheme["platlib"]\n if "/$platlibdir/" in platlib:\n platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")\n if "/lib64/" not in platlib:\n return False\n unpatched = platlib.replace("/lib64/", "/lib/")\n return unpatched.replace("$platbase/", "$base/") == scheme["purelib"]\n\n\n@functools.lru_cache(maxsize=None)\ndef _looks_like_red_hat_lib() -> bool:\n """Red Hat patches platlib in unix_prefix and unix_home, but not purelib.\n\n This is the only way I can see to tell a Red Hat-patched Python.\n """\n from distutils.command.install import INSTALL_SCHEMES\n\n return all(\n k in INSTALL_SCHEMES\n and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k])\n for k in ("unix_prefix", "unix_home")\n )\n\n\n@functools.lru_cache(maxsize=None)\ndef _looks_like_debian_scheme() -> bool:\n """Debian adds two additional schemes."""\n from distutils.command.install import INSTALL_SCHEMES\n\n return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES\n\n\n@functools.lru_cache(maxsize=None)\ndef _looks_like_red_hat_scheme() -> bool:\n """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.\n\n Red Hat's ``00251-change-user-install-location.patch`` changes the install\n command's ``prefix`` and 
``exec_prefix`` to append ``"/local"``. This is\n (fortunately?) done quite unconditionally, so we create a default command\n object without any configuration to detect this.\n """\n from distutils.command.install import install\n from distutils.dist import Distribution\n\n cmd: Any = install(Distribution())\n cmd.finalize_options()\n return (\n cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local"\n and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local"\n )\n\n\n@functools.lru_cache(maxsize=None)\ndef _looks_like_slackware_scheme() -> bool:\n """Slackware patches sysconfig but fails to patch distutils and site.\n\n Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib\n path, but does not do the same to the site module.\n """\n if user_site is None: # User-site not available.\n return False\n try:\n paths = sysconfig.get_paths(scheme="posix_user", expand=False)\n except KeyError: # User-site not available.\n return False\n return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site\n\n\n@functools.lru_cache(maxsize=None)\ndef _looks_like_msys2_mingw_scheme() -> bool:\n """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.\n\n However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is\n likely going to be included in their 3.10 release, so we ignore the warning.\n See msys2/MINGW-packages#9319.\n\n MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase,\n and is missing the final ``"site-packages"``.\n """\n paths = sysconfig.get_paths("nt", expand=False)\n return all(\n "Lib" not in p and "lib" in p and not p.endswith("site-packages")\n for p in (paths[key] for key in ("platlib", "purelib"))\n )\n\n\n@functools.lru_cache(maxsize=None)\ndef _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:\n issue_url = "https://github.com/pypa/pip/issues/10151"\n message = (\n "Value for %s does not match. 
Please report this to <%s>"\n "\ndistutils: %s"\n "\nsysconfig: %s"\n )\n logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new)\n\n\ndef _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:\n if old == new:\n return False\n _warn_mismatched(old, new, key=key)\n return True\n\n\n@functools.lru_cache(maxsize=None)\ndef _log_context(\n *,\n user: bool = False,\n home: Optional[str] = None,\n root: Optional[str] = None,\n prefix: Optional[str] = None,\n) -> None:\n parts = [\n "Additional context:",\n "user = %r",\n "home = %r",\n "root = %r",\n "prefix = %r",\n ]\n\n logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix)\n\n\ndef get_scheme(\n dist_name: str,\n user: bool = False,\n home: Optional[str] = None,\n root: Optional[str] = None,\n isolated: bool = False,\n prefix: Optional[str] = None,\n) -> Scheme:\n new = _sysconfig.get_scheme(\n dist_name,\n user=user,\n home=home,\n root=root,\n isolated=isolated,\n prefix=prefix,\n )\n if _USE_SYSCONFIG:\n return new\n\n old = _distutils.get_scheme(\n dist_name,\n user=user,\n home=home,\n root=root,\n isolated=isolated,\n prefix=prefix,\n )\n\n warning_contexts = []\n for k in SCHEME_KEYS:\n old_v = pathlib.Path(getattr(old, k))\n new_v = pathlib.Path(getattr(new, k))\n\n if old_v == new_v:\n continue\n\n # distutils incorrectly put PyPy packages under ``site-packages/python``\n # in the ``posix_home`` scheme, but PyPy devs said they expect the\n # directory name to be ``pypy`` instead. So we treat this as a bug fix\n # and not warn about it. 
See bpo-43307 and python/cpython#24628.\n skip_pypy_special_case = (\n sys.implementation.name == "pypy"\n and home is not None\n and k in ("platlib", "purelib")\n and old_v.parent == new_v.parent\n and old_v.name.startswith("python")\n and new_v.name.startswith("pypy")\n )\n if skip_pypy_special_case:\n continue\n\n # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in\n # the ``include`` value, but distutils's ``headers`` does. We'll let\n # CPython decide whether this is a bug or feature. See bpo-43948.\n skip_osx_framework_user_special_case = (\n user\n and is_osx_framework()\n and k == "headers"\n and old_v.parent.parent == new_v.parent\n and old_v.parent.name.startswith("python")\n )\n if skip_osx_framework_user_special_case:\n continue\n\n # On Red Hat and derived Linux distributions, distutils is patched to\n # use "lib64" instead of "lib" for platlib.\n if k == "platlib" and _looks_like_red_hat_lib():\n continue\n\n # On Python 3.9+, sysconfig's posix_user scheme sets platlib against\n # sys.platlibdir, but distutils's unix_user incorrectly coninutes\n # using the same $usersite for both platlib and purelib. This creates a\n # mismatch when sys.platlibdir is not "lib".\n skip_bpo_44860 = (\n user\n and k == "platlib"\n and not WINDOWS\n and _PLATLIBDIR != "lib"\n and _looks_like_bpo_44860()\n )\n if skip_bpo_44860:\n continue\n\n # Slackware incorrectly patches posix_user to use lib64 instead of lib,\n # but not usersite to match the location.\n skip_slackware_user_scheme = (\n user\n and k in ("platlib", "purelib")\n and not WINDOWS\n and _looks_like_slackware_scheme()\n )\n if skip_slackware_user_scheme:\n continue\n\n # Both Debian and Red Hat patch Python to place the system site under\n # /usr/local instead of /usr. 
Debian also places lib in dist-packages\n # instead of site-packages, but the /usr/local check should cover it.\n skip_linux_system_special_case = (\n not (user or home or prefix or running_under_virtualenv())\n and old_v.parts[1:3] == ("usr", "local")\n and len(new_v.parts) > 1\n and new_v.parts[1] == "usr"\n and (len(new_v.parts) < 3 or new_v.parts[2] != "local")\n and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())\n )\n if skip_linux_system_special_case:\n continue\n\n # MSYS2 MINGW's sysconfig patch does not include the "site-packages"\n # part of the path. This is incorrect and will be fixed in MSYS.\n skip_msys2_mingw_bug = (\n WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()\n )\n if skip_msys2_mingw_bug:\n continue\n\n # CPython's POSIX install script invokes pip (via ensurepip) against the\n # interpreter located in the source tree, not the install site. This\n # triggers special logic in sysconfig that's not present in distutils.\n # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194\n skip_cpython_build = (\n sysconfig.is_python_build(check_home=True)\n and not WINDOWS\n and k in ("headers", "include", "platinclude")\n )\n if skip_cpython_build:\n continue\n\n warning_contexts.append((old_v, new_v, f"scheme.{k}"))\n\n if not warning_contexts:\n return old\n\n # Check if this path mismatch is caused by distutils config files. Those\n # files will no longer work once we switch to sysconfig, so this raises a\n # deprecation message for them.\n default_old = _distutils.distutils_scheme(\n dist_name,\n user,\n home,\n root,\n isolated,\n prefix,\n ignore_config_files=True,\n )\n if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):\n deprecated(\n reason=(\n "Configuring installation scheme with distutils config files "\n "is deprecated and will no longer work in the near future. 
If you "\n "are using a Homebrew or Linuxbrew Python, please see discussion "\n "at https://github.com/Homebrew/homebrew-core/issues/76621"\n ),\n replacement=None,\n gone_in=None,\n )\n return old\n\n # Post warnings about this mismatch so user can report them back.\n for old_v, new_v, key in warning_contexts:\n _warn_mismatched(old_v, new_v, key=key)\n _log_context(user=user, home=home, root=root, prefix=prefix)\n\n return old\n\n\ndef get_bin_prefix() -> str:\n new = _sysconfig.get_bin_prefix()\n if _USE_SYSCONFIG:\n return new\n\n old = _distutils.get_bin_prefix()\n if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):\n _log_context()\n return old\n\n\ndef get_bin_user() -> str:\n return _sysconfig.get_scheme("", user=True).scripts\n\n\ndef _looks_like_deb_system_dist_packages(value: str) -> bool:\n """Check if the value is Debian's APT-controlled dist-packages.\n\n Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the\n default package path controlled by APT, but does not patch ``sysconfig`` to\n do the same. This is similar to the bug worked around in ``get_scheme()``,\n but here the default is ``deb_system`` instead of ``unix_local``. Ultimately\n we can't do anything about this Debian bug, and this detection allows us to\n skip the warning when needed.\n """\n if not _looks_like_debian_scheme():\n return False\n if value == "/usr/lib/python3/dist-packages":\n return True\n return False\n\n\ndef get_purelib() -> str:\n """Return the default pure-Python lib location."""\n new = _sysconfig.get_purelib()\n if _USE_SYSCONFIG:\n return new\n\n old = _distutils.get_purelib()\n if _looks_like_deb_system_dist_packages(old):\n return old\n if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):\n _log_context()\n return old\n\n\ndef get_platlib() -> str:\n """Return the default platform-shared lib location."""\n new = _sysconfig.get_platlib()\n if _USE_SYSCONFIG:\n return new\n\n from . 
import _distutils\n\n old = _distutils.get_platlib()\n if _looks_like_deb_system_dist_packages(old):\n return old\n if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):\n _log_context()\n return old\n
.venv\Lib\site-packages\pip\_internal\locations\__init__.py
__init__.py
Python
14,309
0.95
0.148064
0.094708
python-kit
969
2024-05-26T02:45:34.320681
Apache-2.0
false
90dba80cc53e87237faef1e7e7e45be5
\n\n
.venv\Lib\site-packages\pip\_internal\locations\__pycache__\base.cpython-313.pyc
base.cpython-313.pyc
Other
3,770
0.95
0
0.026316
awesome-app
686
2025-02-06T13:04:46.441884
BSD-3-Clause
false
cda824aed0da8f5514bbe4a4ef9f9fe3
\n\n
.venv\Lib\site-packages\pip\_internal\locations\__pycache__\_distutils.cpython-313.pyc
_distutils.cpython-313.pyc
Other
6,901
0.95
0.064516
0.011364
vue-tools
859
2023-08-22T08:10:47.635941
BSD-3-Clause
false
4a37354ca0b5a67b57fdba18c9a24daf
\n\n
.venv\Lib\site-packages\pip\_internal\locations\__pycache__\_sysconfig.cpython-313.pyc
_sysconfig.cpython-313.pyc
Other
8,138
0.8
0.121212
0.077778
awesome-app
710
2023-07-31T06:04:18.738624
MIT
false
62cf899d66c49f01a74cf203b4c3baa7
\n\n
.venv\Lib\site-packages\pip\_internal\locations\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
16,145
0.95
0.027273
0.020725
react-lib
114
2023-12-06T12:24:34.882750
BSD-3-Clause
false
62063578faba2348a6c33d244144d8eb
import csv\nimport email.message\nimport functools\nimport json\nimport logging\nimport pathlib\nimport re\nimport zipfile\nfrom typing import (\n IO,\n Any,\n Collection,\n Container,\n Dict,\n Iterable,\n Iterator,\n List,\n NamedTuple,\n Optional,\n Protocol,\n Tuple,\n Union,\n)\n\nfrom pip._vendor.packaging.requirements import Requirement\nfrom pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet\nfrom pip._vendor.packaging.utils import NormalizedName, canonicalize_name\nfrom pip._vendor.packaging.version import Version\n\nfrom pip._internal.exceptions import NoneMetadataError\nfrom pip._internal.locations import site_packages, user_site\nfrom pip._internal.models.direct_url import (\n DIRECT_URL_METADATA_NAME,\n DirectUrl,\n DirectUrlValidationError,\n)\nfrom pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here.\nfrom pip._internal.utils.egg_link import egg_link_path_from_sys_path\nfrom pip._internal.utils.misc import is_local, normalize_path\nfrom pip._internal.utils.urls import url_to_path\n\nfrom ._json import msg_to_json\n\nInfoPath = Union[str, pathlib.PurePath]\n\nlogger = logging.getLogger(__name__)\n\n\nclass BaseEntryPoint(Protocol):\n @property\n def name(self) -> str:\n raise NotImplementedError()\n\n @property\n def value(self) -> str:\n raise NotImplementedError()\n\n @property\n def group(self) -> str:\n raise NotImplementedError()\n\n\ndef _convert_installed_files_path(\n entry: Tuple[str, ...],\n info: Tuple[str, ...],\n) -> str:\n """Convert a legacy installed-files.txt path into modern RECORD path.\n\n The legacy format stores paths relative to the info directory, while the\n modern format stores paths relative to the package root, e.g. 
the\n site-packages directory.\n\n :param entry: Path parts of the installed-files.txt entry.\n :param info: Path parts of the egg-info directory relative to package root.\n :returns: The converted entry.\n\n For best compatibility with symlinks, this does not use ``abspath()`` or\n ``Path.resolve()``, but tries to work with path parts:\n\n 1. While ``entry`` starts with ``..``, remove the equal amounts of parts\n from ``info``; if ``info`` is empty, start appending ``..`` instead.\n 2. Join the two directly.\n """\n while entry and entry[0] == "..":\n if not info or info[-1] == "..":\n info += ("..",)\n else:\n info = info[:-1]\n entry = entry[1:]\n return str(pathlib.Path(*info, *entry))\n\n\nclass RequiresEntry(NamedTuple):\n requirement: str\n extra: str\n marker: str\n\n\nclass BaseDistribution(Protocol):\n @classmethod\n def from_directory(cls, directory: str) -> "BaseDistribution":\n """Load the distribution from a metadata directory.\n\n :param directory: Path to a metadata directory, e.g. 
``.dist-info``.\n """\n raise NotImplementedError()\n\n @classmethod\n def from_metadata_file_contents(\n cls,\n metadata_contents: bytes,\n filename: str,\n project_name: str,\n ) -> "BaseDistribution":\n """Load the distribution from the contents of a METADATA file.\n\n This is used to implement PEP 658 by generating a "shallow" dist object that can\n be used for resolution without downloading or building the actual dist yet.\n\n :param metadata_contents: The contents of a METADATA file.\n :param filename: File name for the dist with this metadata.\n :param project_name: Name of the project this dist represents.\n """\n raise NotImplementedError()\n\n @classmethod\n def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution":\n """Load the distribution from a given wheel.\n\n :param wheel: A concrete wheel definition.\n :param name: File name of the wheel.\n\n :raises InvalidWheel: Whenever loading of the wheel causes a\n :py:exc:`zipfile.BadZipFile` exception to be thrown.\n :raises UnsupportedWheel: If the wheel is a valid zip, but malformed\n internally.\n """\n raise NotImplementedError()\n\n def __repr__(self) -> str:\n return f"{self.raw_name} {self.raw_version} ({self.location})"\n\n def __str__(self) -> str:\n return f"{self.raw_name} {self.raw_version}"\n\n @property\n def location(self) -> Optional[str]:\n """Where the distribution is loaded from.\n\n A string value is not necessarily a filesystem path, since distributions\n can be loaded from other sources, e.g. arbitrary zip archives. ``None``\n means the distribution is created in-memory.\n\n Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. 
If\n this is a symbolic link, we want to preserve the relative path between\n it and files in the distribution.\n """\n raise NotImplementedError()\n\n @property\n def editable_project_location(self) -> Optional[str]:\n """The project location for editable distributions.\n\n This is the directory where pyproject.toml or setup.py is located.\n None if the distribution is not installed in editable mode.\n """\n # TODO: this property is relatively costly to compute, memoize it ?\n direct_url = self.direct_url\n if direct_url:\n if direct_url.is_local_editable():\n return url_to_path(direct_url.url)\n else:\n # Search for an .egg-link file by walking sys.path, as it was\n # done before by dist_is_editable().\n egg_link_path = egg_link_path_from_sys_path(self.raw_name)\n if egg_link_path:\n # TODO: get project location from second line of egg_link file\n # (https://github.com/pypa/pip/issues/10243)\n return self.location\n return None\n\n @property\n def installed_location(self) -> Optional[str]:\n """The distribution's "installed" location.\n\n This should generally be a ``site-packages`` directory. This is\n usually ``dist.location``, except for legacy develop-installed packages,\n where ``dist.location`` is the source code location, and this is where\n the ``.egg-link`` file is.\n\n The returned location is normalized (in particular, with symlinks removed).\n """\n raise NotImplementedError()\n\n @property\n def info_location(self) -> Optional[str]:\n """Location of the .[egg|dist]-info directory or file.\n\n Similarly to ``location``, a string value is not necessarily a\n filesystem path. ``None`` means the distribution is created in-memory.\n\n For a modern .dist-info installation on disk, this should be something\n like ``{location}/{raw_name}-{version}.dist-info``.\n\n Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. 
If\n this is a symbolic link, we want to preserve the relative path between\n it and other files in the distribution.\n """\n raise NotImplementedError()\n\n @property\n def installed_by_distutils(self) -> bool:\n """Whether this distribution is installed with legacy distutils format.\n\n A distribution installed with "raw" distutils not patched by setuptools\n uses one single file at ``info_location`` to store metadata. We need to\n treat this specially on uninstallation.\n """\n info_location = self.info_location\n if not info_location:\n return False\n return pathlib.Path(info_location).is_file()\n\n @property\n def installed_as_egg(self) -> bool:\n """Whether this distribution is installed as an egg.\n\n This usually indicates the distribution was installed by (older versions\n of) easy_install.\n """\n location = self.location\n if not location:\n return False\n # XXX if the distribution is a zipped egg, location has a trailing /\n # so we resort to pathlib.Path to check the suffix in a reliable way.\n return pathlib.Path(location).suffix == ".egg"\n\n @property\n def installed_with_setuptools_egg_info(self) -> bool:\n """Whether this distribution is installed with the ``.egg-info`` format.\n\n This usually indicates the distribution was installed with setuptools\n with an old pip version or with ``single-version-externally-managed``.\n\n Note that this ensure the metadata store is a directory. distutils can\n also installs an ``.egg-info``, but as a file, not a directory. This\n property is *False* for that case. Also see ``installed_by_distutils``.\n """\n info_location = self.info_location\n if not info_location:\n return False\n if not info_location.endswith(".egg-info"):\n return False\n return pathlib.Path(info_location).is_dir()\n\n @property\n def installed_with_dist_info(self) -> bool:\n """Whether this distribution is installed with the "modern format".\n\n This indicates a "modern" installation, e.g. 
storing metadata in the
        ``.dist-info`` directory. This applies to installations made by
        setuptools (but through pip, not directly), or anything using the
        standardized build backend interface (PEP 517).
        """
        info_location = self.info_location
        if not info_location:
            return False
        if not info_location.endswith(".dist-info"):
            return False
        return pathlib.Path(info_location).is_dir()

    @property
    def canonical_name(self) -> NormalizedName:
        # Normalized project name; implemented by backend subclasses.
        raise NotImplementedError()

    @property
    def version(self) -> Version:
        # Parsed distribution version; implemented by backend subclasses.
        raise NotImplementedError()

    @property
    def raw_version(self) -> str:
        # Unparsed version string as found in metadata; implemented by
        # backend subclasses.
        raise NotImplementedError()

    @property
    def setuptools_filename(self) -> str:
        """Convert a project name to its setuptools-compatible filename.

        This is a copy of ``pkg_resources.to_filename()`` for compatibility.
        """
        return self.raw_name.replace("-", "_")

    @property
    def direct_url(self) -> Optional[DirectUrl]:
        """Obtain a DirectUrl from this distribution.

        Returns None if the distribution has no `direct_url.json` metadata,
        or if `direct_url.json` is invalid.
        """
        try:
            content = self.read_text(DIRECT_URL_METADATA_NAME)
        except FileNotFoundError:
            return None
        try:
            return DirectUrl.from_json(content)
        except (
            UnicodeDecodeError,
            json.JSONDecodeError,
            DirectUrlValidationError,
        ) as e:
            logger.warning(
                "Error parsing %s for %s: %s",
                DIRECT_URL_METADATA_NAME,
                self.canonical_name,
                e,
            )
            return None

    @property
    def installer(self) -> str:
        """Name of the tool that installed this distribution.

        Read from the ``INSTALLER`` metadata file (first non-empty line);
        returns "" when the file is absent or unreadable.
        """
        try:
            installer_text = self.read_text("INSTALLER")
        except (OSError, ValueError, NoneMetadataError):
            return ""  # Fail silently if the installer file cannot be read.
        for line in installer_text.splitlines():
            cleaned_line = line.strip()
            if cleaned_line:
                return cleaned_line
        return ""

    @property
    def requested(self) -> bool:
        # True when the ``REQUESTED`` marker file exists in the info directory
        # (i.e. the distribution was installed by user request, not only as a
        # dependency).
        return self.is_file("REQUESTED")

    @property
    def editable(self) -> bool:
        return bool(self.editable_project_location)

    @property
    def local(self) -> bool:
        """If distribution is installed in the current virtual environment.

        Always True if we're not in a virtualenv.
        """
        if self.installed_location is None:
            return False
        return is_local(self.installed_location)

    @property
    def in_usersite(self) -> bool:
        # Whether the install lives under the user site directory; False when
        # either location is unknown.
        if self.installed_location is None or user_site is None:
            return False
        return self.installed_location.startswith(normalize_path(user_site))

    @property
    def in_site_packages(self) -> bool:
        if self.installed_location is None or site_packages is None:
            return False
        return self.installed_location.startswith(normalize_path(site_packages))

    def is_file(self, path: InfoPath) -> bool:
        """Check whether an entry in the info directory is a file."""
        raise NotImplementedError()

    def iter_distutils_script_names(self) -> Iterator[str]:
        """Find distutils 'scripts' entries metadata.

        If 'scripts' is supplied in ``setup.py``, distutils records those in the
        installed distribution's ``scripts`` directory, a file for each script.
        """
        raise NotImplementedError()

    def read_text(self, path: InfoPath) -> str:
        """Read a file in the info directory.

        :raise FileNotFoundError: If ``path`` does not exist in the directory.
        :raise NoneMetadataError: If ``path`` exists in the info directory, but
            cannot be read.
        """
        raise NotImplementedError()

    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
        raise NotImplementedError()

    def _metadata_impl(self) -> email.message.Message:
        # Backend-specific hook; the public cached ``metadata`` property wraps
        # this and augments the result with egg-info requires.txt data.
        raise NotImplementedError()

    @functools.cached_property
    def metadata(self) -> email.message.Message:
        """Metadata of distribution parsed from e.g. METADATA or PKG-INFO.

        This should return an empty message if the metadata file is unavailable.

        :raises NoneMetadataError: If the metadata file is available, but does
            not contain valid metadata.
        """
        metadata = self._metadata_impl()
        self._add_egg_info_requires(metadata)
        return metadata

    @property
    def metadata_dict(self) -> Dict[str, Any]:
        """PEP 566 compliant JSON-serializable representation of METADATA or PKG-INFO.

        This should return an empty dict if the metadata file is unavailable.

        :raises NoneMetadataError: If the metadata file is available, but does
            not contain valid metadata.
        """
        return msg_to_json(self.metadata)

    @property
    def metadata_version(self) -> Optional[str]:
        """Value of "Metadata-Version:" in distribution metadata, if available."""
        return self.metadata.get("Metadata-Version")

    @property
    def raw_name(self) -> str:
        """Value of "Name:" in distribution metadata."""
        # The metadata should NEVER be missing the Name: key, but if it somehow
        # does, fall back to the known canonical name.
        return self.metadata.get("Name", self.canonical_name)

    @property
    def requires_python(self) -> SpecifierSet:
        """Value of "Requires-Python:" in distribution metadata.

        If the key does not exist or contains an invalid value, an empty
        SpecifierSet should be returned.
        """
        value = self.metadata.get("Requires-Python")
        if value is None:
            return SpecifierSet()
        try:
            # Convert to str to satisfy the type checker; this can be a Header object.
            spec = SpecifierSet(str(value))
        except InvalidSpecifier as e:
            message = "Package %r has an invalid Requires-Python: %s"
            logger.warning(message, self.raw_name, e)
            return SpecifierSet()
        return spec

    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
        """Dependencies of this distribution.

        For modern .dist-info distributions, this is the collection of
        "Requires-Dist:" entries in distribution metadata.
        """
        raise NotImplementedError()

    def iter_raw_dependencies(self) -> Iterable[str]:
        """Raw Requires-Dist metadata."""
        return self.metadata.get_all("Requires-Dist", [])

    def iter_provided_extras(self) -> Iterable[NormalizedName]:
        """Extras provided by this distribution.

        For modern .dist-info distributions, this is the collection of
        "Provides-Extra:" entries in distribution metadata.

        The return value of this function is expected to be normalised names,
        per PEP 685, with the returned value being handled appropriately by
        `iter_dependencies`.
        """
        raise NotImplementedError()

    def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]:
        # Entries listed in the ``RECORD`` metadata file (dist-info installs);
        # None when the file is absent.
        try:
            text = self.read_text("RECORD")
        except FileNotFoundError:
            return None
        # This extra Path-str cast normalizes entries.
        return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines()))

    def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]:
        # Entries listed in ``installed-files.txt`` (legacy setuptools
        # installs), rewritten to be RECORD-compatible where possible; None
        # when the file is absent.
        try:
            text = self.read_text("installed-files.txt")
        except FileNotFoundError:
            return None
        paths = (p for p in text.splitlines(keepends=False) if p)
        root = self.location
        info = self.info_location
        if root is None or info is None:
            return paths
        try:
            info_rel = pathlib.Path(info).relative_to(root)
        except ValueError:  # info is not relative to root.
            return paths
        if not info_rel.parts:  # info *is* root.
            return paths
        return (
            _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts)
            for p in paths
        )

    def iter_declared_entries(self) -> Optional[Iterator[str]]:
        """Iterate through file entries declared in this distribution.

        For modern .dist-info distributions, this is the files listed in the
        ``RECORD`` metadata file. For legacy setuptools distributions, this
        comes from ``installed-files.txt``, with entries normalized to be
        compatible with the format used by ``RECORD``.

        :return: An iterator for listed entries, or None if the distribution
            contains neither ``RECORD`` nor ``installed-files.txt``.
        """
        return (
            self._iter_declared_entries_from_record()
            or self._iter_declared_entries_from_legacy()
        )

    def _iter_requires_txt_entries(self) -> Iterator[RequiresEntry]:
        """Parse a ``requires.txt`` in an egg-info directory.

        This is an INI-ish format where an egg-info stores dependencies. A
        section name describes extra other environment markers, while each entry
        is an arbitrary string (not a key-value pair) representing a dependency
        as a requirement string (no markers).

        There is a construct in ``importlib.metadata`` called ``Sectioned`` that
        does mostly the same, but the format is currently considered private.
        """
        try:
            content = self.read_text("requires.txt")
        except FileNotFoundError:
            return
        extra = marker = ""  # Section-less entries don't have markers.
        for line in content.splitlines():
            line = line.strip()
            if not line or line.startswith("#"):  # Comment; ignored.
                continue
            if line.startswith("[") and line.endswith("]"):  # A section header.
                extra, _, marker = line.strip("[]").partition(":")
                continue
            yield RequiresEntry(requirement=line, extra=extra, marker=marker)

    def _iter_egg_info_extras(self) -> Iterable[str]:
        """Get extras from the egg-info directory."""
        # Seed with "" so section-less entries never yield an empty extra.
        known_extras = {""}
        for entry in self._iter_requires_txt_entries():
            extra = canonicalize_name(entry.extra)
            if extra in known_extras:
                continue
            known_extras.add(extra)
            yield extra

    def _iter_egg_info_dependencies(self) -> Iterable[str]:
        """Get distribution dependencies from the egg-info directory.

        To ease parsing, this converts a legacy dependency entry into a PEP 508
        requirement string. Like ``_iter_requires_txt_entries()``, there is code
        in ``importlib.metadata`` that does mostly the same, but does not do
        exactly what we need.

        Namely, ``importlib.metadata`` does not normalize the extra name before
        putting it into the requirement string, which causes marker comparison
        to fail because the dist-info format does normalize. This is consistent
        in all currently available PEP 517 backends, although not standardized.
        """
        for entry in self._iter_requires_txt_entries():
            extra = canonicalize_name(entry.extra)
            if extra and entry.marker:
                marker = f'({entry.marker}) and extra == "{extra}"'
            elif extra:
                marker = f'extra == "{extra}"'
            elif entry.marker:
                marker = entry.marker
            else:
                marker = ""
            if marker:
                yield f"{entry.requirement} ; {marker}"
            else:
                yield entry.requirement

    def _add_egg_info_requires(self, metadata: email.message.Message) -> None:
        """Add egg-info requires.txt information to the metadata."""
        # Only fill these in when the parsed metadata does not already carry
        # them, i.e. for legacy egg-info distributions.
        if not metadata.get_all("Requires-Dist"):
            for dep in self._iter_egg_info_dependencies():
                metadata["Requires-Dist"] = dep
        if not metadata.get_all("Provides-Extra"):
            for extra in self._iter_egg_info_extras():
                metadata["Provides-Extra"] = extra


class BaseEnvironment:
    """An environment containing distributions to introspect."""

    @classmethod
    def default(cls) -> "BaseEnvironment":
        # Environment built from the running interpreter's paths; implemented
        # by backend subclasses.
        raise NotImplementedError()

    @classmethod
    def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment":
        # Environment built from an explicit list of import paths; implemented
        # by backend subclasses.
        raise NotImplementedError()

    def get_distribution(self, name: str) -> Optional["BaseDistribution"]:
        """Given a requirement name, return the installed distributions.

        The name may not be normalized. The implementation must canonicalize
        it for lookup.
        """
        raise NotImplementedError()

    def _iter_distributions(self) -> Iterator["BaseDistribution"]:
        """Iterate through installed distributions.

        This function should be implemented by subclass, but never called
        directly. Use the public ``iter_distribution()`` instead, which
        implements additional logic to make sure the distributions are valid.
        """
        raise NotImplementedError()

    def iter_all_distributions(self) -> Iterator[BaseDistribution]:
        """Iterate through all installed distributions without any filtering."""
        for dist in self._iter_distributions():
            # Make sure the distribution actually comes from a valid Python
            # packaging distribution. Pip's AdjacentTempDirectory leaves folders
            # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The
            # valid project name pattern is taken from PEP 508.
            project_name_valid = re.match(
                r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$",
                dist.canonical_name,
                flags=re.IGNORECASE,
            )
            if not project_name_valid:
                logger.warning(
                    "Ignoring invalid distribution %s (%s)",
                    dist.canonical_name,
                    dist.location,
                )
                continue
            yield dist

    def iter_installed_distributions(
        self,
        local_only: bool = True,
        skip: Container[str] = stdlib_pkgs,
        include_editables: bool = True,
        editables_only: bool = False,
        user_only: bool = False,
    ) -> Iterator[BaseDistribution]:
        """Return a list of installed distributions.

        This is based on ``iter_all_distributions()`` with additional filtering
        options. Note that ``iter_installed_distributions()`` without arguments
        is *not* equal to ``iter_all_distributions()``, since some of the
        configurations exclude packages by default.

        :param local_only: If True (default), only return installations
            local to the current virtualenv, if in a virtualenv.
        :param skip: An iterable of canonicalized project names to ignore;
            defaults to ``stdlib_pkgs``.
        :param include_editables: If False, don't report editables.
        :param editables_only: If True, only report editables.
        :param user_only: If True, only report installations in the user
            site directory.
        """
        it = self.iter_all_distributions()
        if local_only:
            it = (d for d in it if d.local)
        if not include_editables:
            it = (d for d in it if not d.editable)
        if editables_only:
            it = (d for d in it if d.editable)
        if user_only:
            it = (d for d in it if d.in_usersite)
        return (d for d in it if d.canonical_name not in skip)


class Wheel(Protocol):
    """Minimal interface pip needs from a wheel source: a display location
    and a way to open the archive."""

    location: str

    def as_zipfile(self) -> zipfile.ZipFile:
        raise NotImplementedError()


class FilesystemWheel(Wheel):
    """A wheel that exists as a file on disk."""

    def __init__(self, location: str) -> None:
        self.location = location

    def as_zipfile(self) -> zipfile.ZipFile:
        return zipfile.ZipFile(self.location, allowZip64=True)


class MemoryWheel(Wheel):
    """A wheel held in an in-memory bytes stream."""

    def __init__(self, location: str, stream: IO[bytes]) -> None:
        self.location = location
        self.stream = stream

    def as_zipfile(self) -> zipfile.ZipFile:
        return zipfile.ZipFile(self.stream, allowZip64=True)
.venv\Lib\site-packages\pip\_internal\metadata\base.py
base.py
Python
25,467
0.95
0.223188
0.026316
node-utils
286
2024-05-09T04:45:17.971452
Apache-2.0
false
d1421041b700171f2cb2ad159930468d
import email.message
import email.parser
import logging
import os
import zipfile
from typing import (
    Collection,
    Iterable,
    Iterator,
    List,
    Mapping,
    NamedTuple,
    Optional,
)

from pip._vendor import pkg_resources
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version

from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel
from pip._internal.utils.egg_link import egg_link_path_from_location
from pip._internal.utils.misc import display_path, normalize_path
from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file

from .base import (
    BaseDistribution,
    BaseEntryPoint,
    BaseEnvironment,
    InfoPath,
    Wheel,
)

__all__ = ["NAME", "Distribution", "Environment"]

logger = logging.getLogger(__name__)

NAME = "pkg_resources"


class EntryPoint(NamedTuple):
    """A parsed entry point: ``name = value`` registered under ``group``."""

    name: str
    value: str
    group: str


class InMemoryMetadata:
    """IMetadataProvider that reads metadata files from a dictionary.

    This also maps metadata decoding exceptions to our internal exception type.
    """

    def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None:
        self._metadata = metadata
        self._wheel_name = wheel_name

    def has_metadata(self, name: str) -> bool:
        return name in self._metadata

    def get_metadata(self, name: str) -> str:
        try:
            return self._metadata[name].decode()
        except UnicodeDecodeError as e:
            # Augment the default error with the origin of the file.
            raise UnsupportedWheel(
                f"Error decoding metadata for {self._wheel_name}: {e} in {name} file"
            )

    def get_metadata_lines(self, name: str) -> Iterable[str]:
        return pkg_resources.yield_lines(self.get_metadata(name))

    def metadata_isdir(self, name: str) -> bool:
        # In-memory metadata is flat; there are never subdirectories.
        return False

    def metadata_listdir(self, name: str) -> List[str]:
        return []

    def run_script(self, script_name: str, namespace: str) -> None:
        # No-op: scripts are never executed through this provider.
        pass


class Distribution(BaseDistribution):
    """``pkg_resources``-backed implementation of ``BaseDistribution``."""

    def __init__(self, dist: pkg_resources.Distribution) -> None:
        self._dist = dist
        # This is populated lazily, to avoid loading metadata for all possible
        # distributions eagerly. The double underscore triggers name mangling;
        # access goes through the ``_extra_mapping`` property below.
        self.__extra_mapping: Optional[Mapping[NormalizedName, str]] = None

    @property
    def _extra_mapping(self) -> Mapping[NormalizedName, str]:
        # Maps the canonicalized extra name back to the spelling used by the
        # underlying pkg_resources distribution.
        if self.__extra_mapping is None:
            self.__extra_mapping = {
                canonicalize_name(extra): extra for extra in self._dist.extras
            }

        return self.__extra_mapping

    @classmethod
    def from_directory(cls, directory: str) -> BaseDistribution:
        # ``directory`` is an on-disk ``.dist-info`` or ``.egg-info`` directory.
        dist_dir = directory.rstrip(os.sep)

        # Build a PathMetadata object, from path to metadata. :wink:
        base_dir, dist_dir_name = os.path.split(dist_dir)
        metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

        # Determine the correct Distribution object type.
        if dist_dir.endswith(".egg-info"):
            dist_cls = pkg_resources.Distribution
            dist_name = os.path.splitext(dist_dir_name)[0]
        else:
            assert dist_dir.endswith(".dist-info")
            dist_cls = pkg_resources.DistInfoDistribution
            # ``name-version.dist-info`` -> take the name part.
            dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]

        dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata)
        return cls(dist)

    @classmethod
    def from_metadata_file_contents(
        cls,
        metadata_contents: bytes,
        filename: str,
        project_name: str,
    ) -> BaseDistribution:
        # Wrap raw METADATA bytes (e.g. served via PEP 658) in an in-memory
        # provider so pkg_resources can consume them without a wheel on disk.
        metadata_dict = {
            "METADATA": metadata_contents,
        }
        dist = pkg_resources.DistInfoDistribution(
            location=filename,
            metadata=InMemoryMetadata(metadata_dict, filename),
            project_name=project_name,
        )
        return cls(dist)

    @classmethod
    def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
        # Load all files from the wheel's ``.dist-info`` directory into memory,
        # keyed by their name relative to that directory.
        try:
            with wheel.as_zipfile() as zf:
                info_dir, _ = parse_wheel(zf, name)
                metadata_dict = {
                    path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path)
                    for path in zf.namelist()
                    if path.startswith(f"{info_dir}/")
                }
        except zipfile.BadZipFile as e:
            raise InvalidWheel(wheel.location, name) from e
        except UnsupportedWheel as e:
            raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
        dist = pkg_resources.DistInfoDistribution(
            location=wheel.location,
            metadata=InMemoryMetadata(metadata_dict, wheel.location),
            project_name=name,
        )
        return cls(dist)

    @property
    def location(self) -> Optional[str]:
        return self._dist.location

    @property
    def installed_location(self) -> Optional[str]:
        # Prefer the egg-link target (editable installs), then the regular
        # location; normalize either before returning.
        egg_link = egg_link_path_from_location(self.raw_name)
        if egg_link:
            location = egg_link
        elif self.location:
            location = self.location
        else:
            return None
        return normalize_path(location)

    @property
    def info_location(self) -> Optional[str]:
        return self._dist.egg_info

    @property
    def installed_by_distutils(self) -> bool:
        # A distutils-installed distribution is provided by FileMetadata. This
        # provider has a "path" attribute not present anywhere else. Not the
        # best introspection logic, but pip has been doing this for a long time.
        try:
            return bool(self._dist._provider.path)
        except AttributeError:
            return False

    @property
    def canonical_name(self) -> NormalizedName:
        return canonicalize_name(self._dist.project_name)

    @property
    def version(self) -> Version:
        return parse_version(self._dist.version)

    @property
    def raw_version(self) -> str:
        return self._dist.version

    def is_file(self, path: InfoPath) -> bool:
        return self._dist.has_metadata(str(path))

    def iter_distutils_script_names(self) -> Iterator[str]:
        yield from self._dist.metadata_listdir("scripts")

    def read_text(self, path: InfoPath) -> str:
        """Read a metadata file, translating pkg_resources' "missing" and
        "unreadable" cases into pip's exception types."""
        name = str(path)
        if not self._dist.has_metadata(name):
            raise FileNotFoundError(name)
        content = self._dist.get_metadata(name)
        if content is None:
            raise NoneMetadataError(self, name)
        return content

    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
        for group, entries in self._dist.get_entry_map().items():
            for name, entry_point in entries.items():
                # ``str(entry_point)`` renders "name = value"; split it back.
                name, _, value = str(entry_point).partition("=")
                yield EntryPoint(name=name.strip(), value=value.strip(), group=group)

    def _metadata_impl(self) -> email.message.Message:
        """
        :raises NoneMetadataError: if the distribution reports `has_metadata()`
            True but `get_metadata()` returns None.
        """
        if isinstance(self._dist, pkg_resources.DistInfoDistribution):
            metadata_name = "METADATA"
        else:
            metadata_name = "PKG-INFO"
        try:
            metadata = self.read_text(metadata_name)
        except FileNotFoundError:
            if self.location:
                displaying_path = display_path(self.location)
            else:
                displaying_path = repr(self.location)
            logger.warning("No metadata found in %s", displaying_path)
            metadata = ""
        feed_parser = email.parser.FeedParser()
        feed_parser.feed(metadata)
        return feed_parser.close()

    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
        if extras:
            # Translate the requested (possibly unnormalized) extras into the
            # spellings pkg_resources knows, dropping unknown ones.
            relevant_extras = set(self._extra_mapping) & set(
                map(canonicalize_name, extras)
            )
            extras = [self._extra_mapping[extra] for extra in relevant_extras]
        return self._dist.requires(extras)

    def iter_provided_extras(self) -> Iterable[NormalizedName]:
        return self._extra_mapping.keys()


class Environment(BaseEnvironment):
    """``pkg_resources.WorkingSet``-backed implementation of ``BaseEnvironment``."""

    def __init__(self, ws: pkg_resources.WorkingSet) -> None:
        self._ws = ws

    @classmethod
    def default(cls) -> BaseEnvironment:
        return cls(pkg_resources.working_set)

    @classmethod
    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
        return cls(pkg_resources.WorkingSet(paths))

    def _iter_distributions(self) -> Iterator[BaseDistribution]:
        for dist in self._ws:
            yield Distribution(dist)

    def _search_distribution(self, name: str) -> Optional[BaseDistribution]:
        """Find a distribution matching the ``name`` in the environment.

        This searches from *all* distributions available in the environment, to
        match the behavior of ``pkg_resources.get_distribution()``.
        """
        canonical_name = canonicalize_name(name)
        for dist in self.iter_all_distributions():
            if dist.canonical_name == canonical_name:
                return dist
        return None

    def get_distribution(self, name: str) -> Optional[BaseDistribution]:
        # Search the distribution by looking through the working set.
        dist = self._search_distribution(name)
        if dist:
            return dist

        # If distribution could not be found, call working_set.require to
        # update the working set, and try to find the distribution again.
        # This might happen for e.g. when you install a package twice, once
        # using setup.py develop and again using setup.py install. Now when
        # running pip uninstall twice, the package gets removed from the
        # working set in the first uninstall, so we have to populate the
        # working set again so that pip knows about it and the packages gets
        # picked up and is successfully uninstalled the second time too.
        try:
            # We didn't pass in any version specifiers, so this can never
            # raise pkg_resources.VersionConflict.
            self._ws.require(name)
        except pkg_resources.DistributionNotFound:
            return None
        return self._search_distribution(name)
.venv\Lib\site-packages\pip\_internal\metadata\pkg_resources.py
pkg_resources.py
Python
10,542
0.95
0.215947
0.076
awesome-app
533
2023-11-01T14:08:44.095054
MIT
false
2d8b96ee89fc53c452c90025c741619a
# Extracted from https://github.com/pfmoore/pkg_metadata

from email.header import Header, decode_header, make_header
from email.message import Message
from typing import Any, Dict, List, Union, cast

# Core metadata fields and whether each may appear multiple times, per the
# core metadata specification. Multiple-use fields become JSON lists.
METADATA_FIELDS = [
    # Name, Multiple-Use
    ("Metadata-Version", False),
    ("Name", False),
    ("Version", False),
    ("Dynamic", True),
    ("Platform", True),
    ("Supported-Platform", True),
    ("Summary", False),
    ("Description", False),
    ("Description-Content-Type", False),
    ("Keywords", False),
    ("Home-page", False),
    ("Download-URL", False),
    ("Author", False),
    ("Author-email", False),
    ("Maintainer", False),
    ("Maintainer-email", False),
    ("License", False),
    ("License-Expression", False),
    ("License-File", True),
    ("Classifier", True),
    ("Requires-Dist", True),
    ("Requires-Python", False),
    ("Requires-External", True),
    ("Project-URL", True),
    ("Provides-Extra", True),
    ("Provides-Dist", True),
    ("Obsoletes-Dist", True),
]


def json_name(field: str) -> str:
    """Convert a metadata field name to its PEP 566 JSON key.

    E.g. ``"Requires-Dist"`` -> ``"requires_dist"``.
    """
    return field.lower().replace("-", "_")


def msg_to_json(msg: Message) -> Dict[str, Any]:
    """Convert a Message object into a JSON-compatible dictionary.

    Known metadata fields are mapped to lowercase underscore keys; multiple-use
    fields become lists, ``Keywords`` is split into a list, and a non-empty
    message payload becomes ``description``.
    """

    def sanitise_header(h: Union[Header, str]) -> str:
        # Decode RFC 2047 encoded headers into plain text. Chunks labelled
        # "unknown-8bit" are decoded as UTF-8 when possible, otherwise latin1
        # (which cannot fail), so malformed metadata never raises here.
        if isinstance(h, Header):
            chunks = []
            # NOTE: renamed from ``bytes`` — the original shadowed the builtin.
            for data, encoding in decode_header(h):
                if encoding == "unknown-8bit":
                    try:
                        # See if UTF-8 works
                        data.decode("utf-8")
                        encoding = "utf-8"
                    except UnicodeDecodeError:
                        # If not, latin1 at least won't fail
                        encoding = "latin1"
                chunks.append((data, encoding))
            return str(make_header(chunks))
        return str(h)

    result = {}
    for field, multi in METADATA_FIELDS:
        if field not in msg:
            continue
        key = json_name(field)
        if multi:
            value: Union[str, List[str]] = [
                sanitise_header(v) for v in msg.get_all(field)  # type: ignore
            ]
        else:
            value = sanitise_header(msg.get(field))  # type: ignore
            if key == "keywords":
                # Accept both comma-separated and space-separated
                # forms, for better compatibility with old data.
                if "," in value:
                    value = [v.strip() for v in value.split(",")]
                else:
                    value = value.split()
        result[key] = value

    payload = cast(str, msg.get_payload())
    if payload:
        result["description"] = payload

    return result
.venv\Lib\site-packages\pip\_internal\metadata\_json.py
_json.py
Python
2,707
0.95
0.197674
0.078947
react-lib
504
2024-02-14T22:44:10.214555
GPL-3.0
false
4f9b1fc11e7c529d24d0748ed70a9372
"""Selection between pip's two metadata backends.

pip can introspect distributions either through the standard
``importlib.metadata`` module or through the vendored ``pkg_resources``.
This module decides which backend to use and exposes factory helpers that
return backend-appropriate ``Environment``/``Distribution`` objects.
"""

import contextlib
import functools
import os
import sys
from typing import List, Literal, Optional, Protocol, Type, cast

from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.misc import strtobool

from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel

__all__ = [
    "BaseDistribution",
    "BaseEnvironment",
    "FilesystemWheel",
    "MemoryWheel",
    "Wheel",
    "get_default_environment",
    "get_environment",
    "get_wheel_distribution",
    "select_backend",
]


def _should_use_importlib_metadata() -> bool:
    """Whether to use the ``importlib.metadata`` or ``pkg_resources`` backend.

    By default, pip uses ``importlib.metadata`` on Python 3.11+, and
    ``pkg_resources`` otherwise. Up to Python 3.13, this can be
    overridden by a couple of ways:

    * If environment variable ``_PIP_USE_IMPORTLIB_METADATA`` is set, it
      dictates whether ``importlib.metadata`` is used, for Python <3.14.
    * On Python 3.11, 3.12 and 3.13, Python distributors can patch
      ``importlib.metadata`` to add a global constant
      ``_PIP_USE_IMPORTLIB_METADATA = False``. This makes pip use
      ``pkg_resources`` (unless the user set the aforementioned environment
      variable to *True*).

    On Python 3.14+, the ``pkg_resources`` backend cannot be used.
    """
    if sys.version_info >= (3, 14):
        # On Python >=3.14 we only support importlib.metadata.
        return True
    # NOTE: the env-var check must come before the version checks below — an
    # explicitly set variable overrides the version-based defaults.
    with contextlib.suppress(KeyError, ValueError):
        # On Python <3.14, if the environment variable is set, we obey what it says.
        return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"]))
    if sys.version_info < (3, 11):
        # On Python <3.11, we always use pkg_resources, unless the environment
        # variable was set.
        return False
    # On Python 3.11, 3.12 and 3.13, we check if the global constant is set.
    import importlib.metadata

    return bool(getattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA", True))


def _emit_pkg_resources_deprecation_if_needed() -> None:
    # Warn only when falling back to pkg_resources was a *user* decision
    # (via the environment variable), not a distributor patch or the default.
    if sys.version_info < (3, 11):
        # All pip versions supporting Python<=3.11 will support pkg_resources,
        # and pkg_resources is the default for these, so let's not bother users.
        return

    import importlib.metadata

    if hasattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA"):
        # The Python distributor has set the global constant, so we don't
        # warn, since it is not a user decision.
        return

    # The user has decided to use pkg_resources, so we warn.
    deprecated(
        reason="Using the pkg_resources metadata backend is deprecated.",
        replacement=(
            "to use the default importlib.metadata backend, "
            "by unsetting the _PIP_USE_IMPORTLIB_METADATA environment variable"
        ),
        gone_in="26.3",
        issue=13317,
    )


class Backend(Protocol):
    """Shape of a backend module (``.importlib`` or ``.pkg_resources``)."""

    NAME: 'Literal["importlib", "pkg_resources"]'
    Distribution: Type[BaseDistribution]
    Environment: Type[BaseEnvironment]


@functools.lru_cache(maxsize=None)
def select_backend() -> Backend:
    # Cached so the backend decision (and any deprecation warning) happens at
    # most once per process.
    if _should_use_importlib_metadata():
        from . import importlib

        return cast(Backend, importlib)

    _emit_pkg_resources_deprecation_if_needed()

    from . import pkg_resources

    return cast(Backend, pkg_resources)


def get_default_environment() -> BaseEnvironment:
    """Get the default representation for the current environment.

    This returns an Environment instance from the chosen backend. The default
    Environment instance should be built from ``sys.path`` and may use caching
    to share instance state across calls.
    """
    return select_backend().Environment.default()


def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
    """Get a representation of the environment specified by ``paths``.

    This returns an Environment instance from the chosen backend based on the
    given import paths. The backend must build a fresh instance representing
    the state of installed distributions when this function is called.
    """
    return select_backend().Environment.from_paths(paths)


def get_directory_distribution(directory: str) -> BaseDistribution:
    """Get the distribution metadata representation in the specified directory.

    This returns a Distribution instance from the chosen backend based on
    the given on-disk ``.dist-info`` directory.
    """
    return select_backend().Distribution.from_directory(directory)


def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution:
    """Get the representation of the specified wheel's distribution metadata.

    This returns a Distribution instance from the chosen backend based on
    the given wheel's ``.dist-info`` directory.

    :param canonical_name: Normalized project name of the given wheel.
    """
    return select_backend().Distribution.from_wheel(wheel, canonical_name)


def get_metadata_distribution(
    metadata_contents: bytes,
    filename: str,
    canonical_name: str,
) -> BaseDistribution:
    """Get the dist representation of the specified METADATA file contents.

    This returns a Distribution instance from the chosen backend sourced from
    the data in `metadata_contents`.

    :param metadata_contents: Contents of a METADATA file within a dist, or
        one served via PEP 658.
    :param filename: Filename for the dist this metadata represents.
    :param canonical_name: Normalized project name of the given dist.
    """
    return select_backend().Distribution.from_metadata_file_contents(
        metadata_contents,
        filename,
        canonical_name,
    )
.venv\Lib\site-packages\pip\_internal\metadata\__init__.py
__init__.py
Python
5,723
0.95
0.12963
0.097561
python-kit
167
2024-02-27T05:30:03.045365
BSD-3-Clause
false
adc5d3ea98000907a9e8248713071d07
import importlib.metadata
import os
from typing import Any, Optional, Protocol, Tuple, cast

from pip._vendor.packaging.utils import NormalizedName, canonicalize_name


class BadMetadata(ValueError):
    """Raised when a distribution's metadata cannot be used to identify it."""

    def __init__(self, dist: importlib.metadata.Distribution, *, reason: str) -> None:
        self.dist = dist
        self.reason = reason

    def __str__(self) -> str:
        return "Bad metadata in {} ({})".format(self.dist, self.reason)


class BasePath(Protocol):
    """A protocol that various path objects conform.

    This exists because importlib.metadata uses both ``pathlib.Path`` and
    ``zipfile.Path``, and we need a common base for type hints (Union does not
    work well since ``zipfile.Path`` is too new for our linter setup).

    This does not mean to be exhaustive, but only contains things that present
    in both classes *that we need*.
    """

    @property
    def name(self) -> str:
        raise NotImplementedError()

    @property
    def parent(self) -> "BasePath":
        raise NotImplementedError()


def get_info_location(d: importlib.metadata.Distribution) -> Optional[BasePath]:
    """Find the path to the distribution's metadata directory.

    HACK: This relies on importlib.metadata's private ``_path`` attribute. Not
    all distributions exist on disk, so importlib.metadata is correct to not
    expose the attribute as public. But pip's code base is old and not as
    clean, so we do this to avoid having to rewrite too many things. Hopefully
    we can eliminate this some day.
    """
    return getattr(d, "_path", None)


def parse_name_and_version_from_info_directory(
    dist: importlib.metadata.Distribution,
) -> Tuple[Optional[str], Optional[str]]:
    """Get a name and version from the metadata directory name.

    This is much faster than reading distribution metadata.
    """
    location = get_info_location(dist)
    if location is None:
        return None, None

    stem, suffix = os.path.splitext(location.name)
    if suffix == ".dist-info":
        # "<name>-<version>.dist-info" carries both pieces.
        name, sep, version = stem.partition("-")
        if sep:
            return name, version
    elif suffix == ".egg-info":
        # Only the name can be recovered reliably from an egg-info stem.
        return stem.split("-", 1)[0], None

    return None, None


def get_dist_canonical_name(dist: importlib.metadata.Distribution) -> NormalizedName:
    """Get the distribution's normalized name.

    The ``name`` attribute is only available in Python 3.10 or later. We are
    targeting exactly that, but Mypy does not know this.
    """
    parsed_name = parse_name_and_version_from_info_directory(dist)[0]
    if parsed_name:
        return canonicalize_name(parsed_name)

    raw_name = cast(Any, dist).name
    if not isinstance(raw_name, str):
        raise BadMetadata(dist, reason="invalid metadata entry 'name'")
    return canonicalize_name(raw_name)
.venv\Lib\site-packages\pip\_internal\metadata\importlib\_compat.py
_compat.py
Python
2,796
0.85
0.2
0
node-utils
346
2024-05-06T20:24:59.892226
GPL-3.0
false
4ca94dc4bc67410d96a247fb4d57546f
import email.message
import importlib.metadata
import pathlib
import zipfile
from os import PathLike
from typing import (
    Collection,
    Dict,
    Iterable,
    Iterator,
    Mapping,
    Optional,
    Sequence,
    Union,
    cast,
)

from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version

from pip._internal.exceptions import InvalidWheel, UnsupportedWheel
from pip._internal.metadata.base import (
    BaseDistribution,
    BaseEntryPoint,
    InfoPath,
    Wheel,
)
from pip._internal.utils.misc import normalize_path
from pip._internal.utils.packaging import get_requirement
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file

from ._compat import (
    BasePath,
    get_dist_canonical_name,
    parse_name_and_version_from_info_directory,
)


class WheelDistribution(importlib.metadata.Distribution):
    """An ``importlib.metadata.Distribution`` read from a wheel.

    Although ``importlib.metadata.PathDistribution`` accepts ``zipfile.Path``,
    its implementation is too "lazy" for pip's needs (we can't keep the ZipFile
    handle open for the entire lifetime of the distribution object).

    This implementation eagerly reads the entire metadata directory into the
    memory instead, and operates from that.
    """

    def __init__(
        self,
        files: Mapping[pathlib.PurePosixPath, bytes],
        info_location: pathlib.PurePosixPath,
    ) -> None:
        self._files = files
        self.info_location = info_location

    @classmethod
    def from_zipfile(
        cls,
        zf: zipfile.ZipFile,
        name: str,
        location: str,
    ) -> "WheelDistribution":
        """Eagerly read the wheel's ``.dist-info`` directory into memory."""
        info_dir, _ = parse_wheel(zf, name)
        paths = (
            (name, pathlib.PurePosixPath(name.split("/", 1)[-1]))
            for name in zf.namelist()
            if name.startswith(f"{info_dir}/")
        )
        files = {
            relpath: read_wheel_metadata_file(zf, fullpath)
            for fullpath, relpath in paths
        }
        info_location = pathlib.PurePosixPath(location, info_dir)
        return cls(files, info_location)

    def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]:
        # Only allow iterating through the metadata directory.
        if pathlib.PurePosixPath(str(path)) in self._files:
            return iter(self._files)
        raise FileNotFoundError(path)

    def read_text(self, filename: str) -> Optional[str]:
        """Return the decoded contents of ``filename``, or None if absent.

        Raises UnsupportedWheel when the file exists but is not valid UTF-8.
        """
        try:
            data = self._files[pathlib.PurePosixPath(filename)]
        except KeyError:
            return None
        try:
            text = data.decode("utf-8")
        except UnicodeDecodeError as e:
            wheel = self.info_location.parent
            # BUGFIX: include the offending metadata file's name in the error
            # message instead of the literal "(unknown)" placeholder, so the
            # user can tell which file inside the wheel failed to decode.
            error = f"Error decoding metadata for {wheel}: {e} in {filename} file"
            raise UnsupportedWheel(error)
        return text

    def locate_file(self, path: Union[str, "PathLike[str]"]) -> pathlib.Path:
        # This method doesn't make sense for our in-memory wheel, but the API
        # requires us to define it.
        raise NotImplementedError


class Distribution(BaseDistribution):
    """A pip distribution backed by an ``importlib.metadata.Distribution``."""

    def __init__(
        self,
        dist: importlib.metadata.Distribution,
        info_location: Optional[BasePath],
        installed_location: Optional[BasePath],
    ) -> None:
        self._dist = dist
        self._info_location = info_location
        self._installed_location = installed_location

    @classmethod
    def from_directory(cls, directory: str) -> BaseDistribution:
        """Build a distribution from an on-disk ``.dist-info`` directory."""
        info_location = pathlib.Path(directory)
        dist = importlib.metadata.Distribution.at(info_location)
        return cls(dist, info_location, info_location.parent)

    @classmethod
    def from_metadata_file_contents(
        cls,
        metadata_contents: bytes,
        filename: str,
        project_name: str,
    ) -> BaseDistribution:
        """Build a distribution from raw METADATA bytes (e.g. PEP 658)."""
        # Generate temp dir to contain the metadata file, and write the file contents.
        temp_dir = pathlib.Path(
            TempDirectory(kind="metadata", globally_managed=True).path
        )
        metadata_path = temp_dir / "METADATA"
        metadata_path.write_bytes(metadata_contents)
        # Construct dist pointing to the newly created directory.
        dist = importlib.metadata.Distribution.at(metadata_path.parent)
        return cls(dist, metadata_path.parent, None)

    @classmethod
    def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
        """Build a distribution by reading a wheel's metadata directory."""
        try:
            with wheel.as_zipfile() as zf:
                dist = WheelDistribution.from_zipfile(zf, name, wheel.location)
        except zipfile.BadZipFile as e:
            raise InvalidWheel(wheel.location, name) from e
        return cls(dist, dist.info_location, pathlib.PurePosixPath(wheel.location))

    @property
    def location(self) -> Optional[str]:
        if self._info_location is None:
            return None
        return str(self._info_location.parent)

    @property
    def info_location(self) -> Optional[str]:
        if self._info_location is None:
            return None
        return str(self._info_location)

    @property
    def installed_location(self) -> Optional[str]:
        if self._installed_location is None:
            return None
        return normalize_path(str(self._installed_location))

    @property
    def canonical_name(self) -> NormalizedName:
        return get_dist_canonical_name(self._dist)

    @property
    def version(self) -> Version:
        # Prefer the (much cheaper) version parsed from the directory name.
        if version := parse_name_and_version_from_info_directory(self._dist)[1]:
            return parse_version(version)
        return parse_version(self._dist.version)

    @property
    def raw_version(self) -> str:
        return self._dist.version

    def is_file(self, path: InfoPath) -> bool:
        return self._dist.read_text(str(path)) is not None

    def iter_distutils_script_names(self) -> Iterator[str]:
        # A distutils installation is always "flat" (not in e.g. egg form), so
        # if this distribution's info location is NOT a pathlib.Path (but e.g.
        # zipfile.Path), it can never contain any distutils scripts.
        if not isinstance(self._info_location, pathlib.Path):
            return
        for child in self._info_location.joinpath("scripts").iterdir():
            yield child.name

    def read_text(self, path: InfoPath) -> str:
        content = self._dist.read_text(str(path))
        if content is None:
            raise FileNotFoundError(path)
        return content

    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
        # importlib.metadata's EntryPoint structure satisfies BaseEntryPoint.
        return self._dist.entry_points

    def _metadata_impl(self) -> email.message.Message:
        # From Python 3.10+, importlib.metadata declares PackageMetadata as the
        # return type. This protocol is unfortunately a disaster now and misses
        # a ton of fields that we need, including get() and get_payload(). We
        # rely on the implementation that the object is actually a Message now,
        # until upstream can improve the protocol. (python/cpython#94952)
        return cast(email.message.Message, self._dist.metadata)

    def iter_provided_extras(self) -> Iterable[NormalizedName]:
        return [
            canonicalize_name(extra)
            for extra in self.metadata.get_all("Provides-Extra", [])
        ]

    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
        contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras]
        for req_string in self.metadata.get_all("Requires-Dist", []):
            # strip() because email.message.Message.get_all() may return a
            # leading newline in case a long header was wrapped.
            req = get_requirement(req_string.strip())
            if not req.marker:
                yield req
            elif not extras and req.marker.evaluate({"extra": ""}):
                yield req
            elif any(req.marker.evaluate(context) for context in contexts):
                yield req
.venv\Lib\site-packages\pip\_internal\metadata\importlib\_dists.py
_dists.py
Python
8,279
0.95
0.210526
0.080808
awesome-app
376
2025-04-21T03:48:14.806666
BSD-3-Clause
false
a9239bd524f9db60e5c54d0f2c631585
import importlib.metadata
import logging
import os
import pathlib
import sys
import zipfile
from typing import Iterator, List, Optional, Sequence, Set, Tuple

from pip._vendor.packaging.utils import (
    InvalidWheelFilename,
    NormalizedName,
    canonicalize_name,
    parse_wheel_filename,
)

from pip._internal.metadata.base import BaseDistribution, BaseEnvironment
from pip._internal.utils.filetypes import WHEEL_EXTENSION

from ._compat import BadMetadata, BasePath, get_dist_canonical_name, get_info_location
from ._dists import Distribution

logger = logging.getLogger(__name__)


def _looks_like_wheel(location: str) -> bool:
    """Check whether ``location`` appears to be a valid wheel file on disk.

    True only if the path has the wheel extension, is a regular file, has a
    parseable wheel filename, and is actually a ZIP archive.
    """
    if not location.endswith(WHEEL_EXTENSION):
        return False
    if not os.path.isfile(location):
        return False
    try:
        parse_wheel_filename(os.path.basename(location))
    except InvalidWheelFilename:
        return False
    return zipfile.is_zipfile(location)


class _DistributionFinder:
    """Finder to locate distributions.

    The main purpose of this class is to memoize found distributions' names, so
    only one distribution is returned for each package name. At lot of pip code
    assumes this (because it is setuptools's behavior), and not doing the same
    can potentially cause a distribution in lower precedence path to override a
    higher precedence one if the caller is not careful.

    Eventually we probably want to make it possible to see lower precedence
    installations as well. It's useful feature, after all.
    """

    FoundResult = Tuple[importlib.metadata.Distribution, Optional[BasePath]]

    def __init__(self) -> None:
        # Canonical names already yielded; later (lower-precedence) hits with
        # the same name are skipped.
        self._found_names: Set[NormalizedName] = set()

    def _find_impl(self, location: str) -> Iterator[FoundResult]:
        """Find distributions in a location."""
        # Skip looking inside a wheel. Since a package inside a wheel is not
        # always valid (due to .data directories etc.), its .dist-info entry
        # should not be considered an installed distribution.
        if _looks_like_wheel(location):
            return
        # To know exactly where we find a distribution, we have to feed in the
        # paths one by one, instead of dumping the list to importlib.metadata.
        for dist in importlib.metadata.distributions(path=[location]):
            info_location = get_info_location(dist)
            try:
                name = get_dist_canonical_name(dist)
            except BadMetadata as e:
                # Unidentifiable entries are skipped, not fatal.
                logger.warning("Skipping %s due to %s", info_location, e.reason)
                continue
            if name in self._found_names:
                continue
            self._found_names.add(name)
            yield dist, info_location

    def find(self, location: str) -> Iterator[BaseDistribution]:
        """Find distributions in a location.

        The path can be either a directory, or a ZIP archive.
        """
        for dist, info_location in self._find_impl(location):
            if info_location is None:
                installed_location: Optional[BasePath] = None
            else:
                installed_location = info_location.parent
            yield Distribution(dist, info_location, installed_location)

    def find_legacy_editables(self, location: str) -> Iterator[BaseDistribution]:
        """Read location in egg-link files and return distributions in there.

        The path should be a directory; otherwise this returns nothing. This
        follows how setuptools does this for compatibility. The first non-empty
        line in the egg-link is read as a path (resolved against the egg-link's
        containing directory if relative). Distributions found at that linked
        location are returned.
        """
        path = pathlib.Path(location)
        if not path.is_dir():
            return
        for child in path.iterdir():
            if child.suffix != ".egg-link":
                continue
            with child.open() as f:
                lines = (line.strip() for line in f)
                target_rel = next((line for line in lines if line), "")
            if not target_rel:
                continue
            target_location = str(path.joinpath(target_rel))
            for dist, info_location in self._find_impl(target_location):
                yield Distribution(dist, info_location, path)


class Environment(BaseEnvironment):
    """Environment backed by ``importlib.metadata`` over a list of paths."""

    def __init__(self, paths: Sequence[str]) -> None:
        self._paths = paths

    @classmethod
    def default(cls) -> BaseEnvironment:
        # Reflects the running interpreter's own import paths.
        return cls(sys.path)

    @classmethod
    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
        if paths is None:
            return cls(sys.path)
        return cls(paths)

    def _iter_distributions(self) -> Iterator[BaseDistribution]:
        # One shared finder so name memoization spans all paths, preserving
        # precedence order across locations.
        finder = _DistributionFinder()
        for location in self._paths:
            yield from finder.find(location)
            yield from finder.find_legacy_editables(location)

    def get_distribution(self, name: str) -> Optional[BaseDistribution]:
        """Return the installed distribution matching ``name``, or None."""
        canonical_name = canonicalize_name(name)
        matches = (
            distribution
            for distribution in self.iter_all_distributions()
            if distribution.canonical_name == canonical_name
        )
        return next(matches, None)
.venv\Lib\site-packages\pip\_internal\metadata\importlib\_envs.py
_envs.py
Python
5,297
0.95
0.271429
0.042735
vue-tools
154
2025-07-01T02:49:27.433881
MIT
false
337c78973928f0fb2805e07b4b404687
from ._dists import Distribution
from ._envs import Environment

__all__ = ["NAME", "Distribution", "Environment"]

# Identifier for this metadata backend implementation.
NAME = "importlib"
.venv\Lib\site-packages\pip\_internal\metadata\importlib\__init__.py
__init__.py
Python
135
0.85
0
0
awesome-app
525
2024-12-13T02:39:49.312858
Apache-2.0
false
994b6ede7339c2d81df1ec2fcf571a53
\n\n
.venv\Lib\site-packages\pip\_internal\metadata\importlib\__pycache__\_compat.cpython-313.pyc
_compat.cpython-313.pyc
Other
4,538
0.95
0.035714
0
vue-tools
644
2025-03-11T20:01:59.144502
GPL-3.0
false
3f369a73ba798a008a5b079cfa6e09a4
\n\n
.venv\Lib\site-packages\pip\_internal\metadata\importlib\__pycache__\_dists.cpython-313.pyc
_dists.cpython-313.pyc
Other
13,125
0.95
0.026786
0
react-lib
150
2023-10-09T08:14:09.684924
BSD-3-Clause
false
618d63d2f5619499545fef24bdd31933
\n\n
.venv\Lib\site-packages\pip\_internal\metadata\importlib\__pycache__\_envs.cpython-313.pyc
_envs.cpython-313.pyc
Other
8,164
0.95
0.061728
0
node-utils
679
2024-03-20T00:09:28.955092
Apache-2.0
false
9225e2e5578dc8e6eb450566f2fb4cdd
\n\n
.venv\Lib\site-packages\pip\_internal\metadata\importlib\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
360
0.85
0
0
react-lib
560
2025-03-19T07:59:16.506654
BSD-3-Clause
false
febfccea136d39344215284fdda9ffd4
\n\n
.venv\Lib\site-packages\pip\_internal\metadata\__pycache__\base.cpython-313.pyc
base.cpython-313.pyc
Other
34,658
0.95
0.067989
0.009901
python-kit
304
2023-10-08T09:49:43.277419
MIT
false
435ceaadd9bb66c4ff66a47bb889604e
\n\n
.venv\Lib\site-packages\pip\_internal\metadata\__pycache__\pkg_resources.cpython-313.pyc
pkg_resources.cpython-313.pyc
Other
16,328
0.95
0.014184
0
node-utils
75
2025-03-17T21:05:00.320174
GPL-3.0
false
05d855e676cb2fd80835196ac6e1cf27
\n\n
.venv\Lib\site-packages\pip\_internal\metadata\__pycache__\_json.cpython-313.pyc
_json.cpython-313.pyc
Other
3,018
0.8
0
0
react-lib
946
2024-03-24T05:33:40.116539
Apache-2.0
false
278230f65e33159ec33cbe8aaf2e049d
\n\n
.venv\Lib\site-packages\pip\_internal\metadata\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
6,632
0.95
0.051282
0.029851
awesome-app
673
2025-05-26T13:24:40.362279
Apache-2.0
false
95565c559f5927350bbe0bb88a6cbce5
from dataclasses import dataclass

from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version

from pip._internal.models.link import Link


@dataclass(frozen=True)
class InstallationCandidate:
    """Represents a potential "candidate" for installation."""

    __slots__ = ["name", "version", "link"]

    # name: the candidate's project name.
    # version: parsed version object (the custom __init__ takes a string).
    # link: where the candidate can be downloaded from.
    name: str
    version: Version
    link: Link

    def __init__(self, name: str, version: str, link: Link) -> None:
        # A hand-written __init__ is kept (dataclass does not overwrite an
        # existing one) so the incoming version *string* can be parsed into a
        # Version here. Because the dataclass is frozen, plain assignment
        # would raise FrozenInstanceError, hence object.__setattr__.
        object.__setattr__(self, "name", name)
        object.__setattr__(self, "version", parse_version(version))
        object.__setattr__(self, "link", link)

    def __str__(self) -> str:
        return f"{self.name!r} candidate (version {self.version} at {self.link})"
.venv\Lib\site-packages\pip\_internal\models\candidate.py
candidate.py
Python
753
0.85
0.16
0
python-kit
607
2024-09-16T00:04:53.500541
GPL-3.0
false
5c6959bb25f9ca06400891d2662be98a
"""PEP 610"""\n\nimport json\nimport re\nimport urllib.parse\nfrom dataclasses import dataclass\nfrom typing import Any, ClassVar, Dict, Iterable, Optional, Type, TypeVar, Union\n\n__all__ = [\n "DirectUrl",\n "DirectUrlValidationError",\n "DirInfo",\n "ArchiveInfo",\n "VcsInfo",\n]\n\nT = TypeVar("T")\n\nDIRECT_URL_METADATA_NAME = "direct_url.json"\nENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")\n\n\nclass DirectUrlValidationError(Exception):\n pass\n\n\ndef _get(\n d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None\n) -> Optional[T]:\n """Get value from dictionary and verify expected type."""\n if key not in d:\n return default\n value = d[key]\n if not isinstance(value, expected_type):\n raise DirectUrlValidationError(\n f"{value!r} has unexpected type for {key} (expected {expected_type})"\n )\n return value\n\n\ndef _get_required(\n d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None\n) -> T:\n value = _get(d, expected_type, key, default)\n if value is None:\n raise DirectUrlValidationError(f"{key} must have a value")\n return value\n\n\ndef _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":\n infos = [info for info in infos if info is not None]\n if not infos:\n raise DirectUrlValidationError(\n "missing one of archive_info, dir_info, vcs_info"\n )\n if len(infos) > 1:\n raise DirectUrlValidationError(\n "more than one of archive_info, dir_info, vcs_info"\n )\n assert infos[0] is not None\n return infos[0]\n\n\ndef _filter_none(**kwargs: Any) -> Dict[str, Any]:\n """Make dict excluding None values."""\n return {k: v for k, v in kwargs.items() if v is not None}\n\n\n@dataclass\nclass VcsInfo:\n name: ClassVar = "vcs_info"\n\n vcs: str\n commit_id: str\n requested_revision: Optional[str] = None\n\n @classmethod\n def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:\n if d is None:\n return None\n return cls(\n vcs=_get_required(d, str, 
"vcs"),\n commit_id=_get_required(d, str, "commit_id"),\n requested_revision=_get(d, str, "requested_revision"),\n )\n\n def _to_dict(self) -> Dict[str, Any]:\n return _filter_none(\n vcs=self.vcs,\n requested_revision=self.requested_revision,\n commit_id=self.commit_id,\n )\n\n\nclass ArchiveInfo:\n name = "archive_info"\n\n def __init__(\n self,\n hash: Optional[str] = None,\n hashes: Optional[Dict[str, str]] = None,\n ) -> None:\n # set hashes before hash, since the hash setter will further populate hashes\n self.hashes = hashes\n self.hash = hash\n\n @property\n def hash(self) -> Optional[str]:\n return self._hash\n\n @hash.setter\n def hash(self, value: Optional[str]) -> None:\n if value is not None:\n # Auto-populate the hashes key to upgrade to the new format automatically.\n # We don't back-populate the legacy hash key from hashes.\n try:\n hash_name, hash_value = value.split("=", 1)\n except ValueError:\n raise DirectUrlValidationError(\n f"invalid archive_info.hash format: {value!r}"\n )\n if self.hashes is None:\n self.hashes = {hash_name: hash_value}\n elif hash_name not in self.hashes:\n self.hashes = self.hashes.copy()\n self.hashes[hash_name] = hash_value\n self._hash = value\n\n @classmethod\n def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:\n if d is None:\n return None\n return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes"))\n\n def _to_dict(self) -> Dict[str, Any]:\n return _filter_none(hash=self.hash, hashes=self.hashes)\n\n\n@dataclass\nclass DirInfo:\n name: ClassVar = "dir_info"\n\n editable: bool = False\n\n @classmethod\n def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:\n if d is None:\n return None\n return cls(editable=_get_required(d, bool, "editable", default=False))\n\n def _to_dict(self) -> Dict[str, Any]:\n return _filter_none(editable=self.editable or None)\n\n\nInfoType = Union[ArchiveInfo, DirInfo, VcsInfo]\n\n\n@dataclass\nclass DirectUrl:\n url: str\n info: 
InfoType\n subdirectory: Optional[str] = None\n\n def _remove_auth_from_netloc(self, netloc: str) -> str:\n if "@" not in netloc:\n return netloc\n user_pass, netloc_no_user_pass = netloc.split("@", 1)\n if (\n isinstance(self.info, VcsInfo)\n and self.info.vcs == "git"\n and user_pass == "git"\n ):\n return netloc\n if ENV_VAR_RE.match(user_pass):\n return netloc\n return netloc_no_user_pass\n\n @property\n def redacted_url(self) -> str:\n """url with user:password part removed unless it is formed with\n environment variables as specified in PEP 610, or it is ``git``\n in the case of a git URL.\n """\n purl = urllib.parse.urlsplit(self.url)\n netloc = self._remove_auth_from_netloc(purl.netloc)\n surl = urllib.parse.urlunsplit(\n (purl.scheme, netloc, purl.path, purl.query, purl.fragment)\n )\n return surl\n\n def validate(self) -> None:\n self.from_dict(self.to_dict())\n\n @classmethod\n def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":\n return DirectUrl(\n url=_get_required(d, str, "url"),\n subdirectory=_get(d, str, "subdirectory"),\n info=_exactly_one_of(\n [\n ArchiveInfo._from_dict(_get(d, dict, "archive_info")),\n DirInfo._from_dict(_get(d, dict, "dir_info")),\n VcsInfo._from_dict(_get(d, dict, "vcs_info")),\n ]\n ),\n )\n\n def to_dict(self) -> Dict[str, Any]:\n res = _filter_none(\n url=self.redacted_url,\n subdirectory=self.subdirectory,\n )\n res[self.info.name] = self.info._to_dict()\n return res\n\n @classmethod\n def from_json(cls, s: str) -> "DirectUrl":\n return cls.from_dict(json.loads(s))\n\n def to_json(self) -> str:\n return json.dumps(self.to_dict(), sort_keys=True)\n\n def is_local_editable(self) -> bool:\n return isinstance(self.info, DirInfo) and self.info.editable\n
.venv\Lib\site-packages\pip\_internal\models\direct_url.py
direct_url.py
Python
6,576
0.95
0.200893
0.016575
react-lib
519
2024-02-21T07:01:22.199711
Apache-2.0
false
f6dfeaafc451a65a67bcfe23717e7b64
from typing import FrozenSet, Optional, Set

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.exceptions import CommandError


class FormatControl:
    """Track which install formats (binary/source) are allowed per package."""

    __slots__ = ["no_binary", "only_binary"]

    def __init__(
        self,
        no_binary: Optional[Set[str]] = None,
        only_binary: Optional[Set[str]] = None,
    ) -> None:
        self.no_binary = set() if no_binary is None else no_binary
        self.only_binary = set() if only_binary is None else only_binary

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, self.__class__):
            return NotImplemented
        # Slot lists must agree before the slot values are compared.
        if self.__slots__ != other.__slots__:
            return False
        return all(
            getattr(self, attr) == getattr(other, attr) for attr in self.__slots__
        )

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})"

    @staticmethod
    def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
        """Merge a --no-binary/--only-binary value into ``target``.

        Names added to ``target`` are removed from the opposing set ``other``;
        the :all: and :none: sentinels reset state as documented by pip.
        """
        if value.startswith("-"):
            raise CommandError(
                "--no-binary / --only-binary option requires 1 argument."
            )
        names = value.split(",")
        while ":all:" in names:
            other.clear()
            target.clear()
            target.add(":all:")
            del names[: names.index(":all:") + 1]
            # Without a none, we want to discard everything as :all: covers it
            if ":none:" not in names:
                return
        for raw in names:
            if raw == ":none:":
                target.clear()
                continue
            normalized = canonicalize_name(raw)
            other.discard(normalized)
            target.add(normalized)

    def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
        """Return the subset of {"binary", "source"} allowed for a package."""
        formats = {"binary", "source"}
        # Name-specific entries take precedence over the :all: wildcard;
        # only the first matching rule applies.
        checks = (
            (canonical_name in self.only_binary, "source"),
            (canonical_name in self.no_binary, "binary"),
            (":all:" in self.only_binary, "source"),
            (":all:" in self.no_binary, "binary"),
        )
        for matched, disallowed in checks:
            if matched:
                formats.discard(disallowed)
                break
        return frozenset(formats)

    def disallow_binaries(self) -> None:
        """Force source-only installs for every package."""
        self.handle_mutual_excludes(
            ":all:",
            self.no_binary,
            self.only_binary,
        )
.venv\Lib\site-packages\pip\_internal\models\format_control.py
format_control.py
Python
2,486
0.95
0.24359
0.015625
node-utils
106
2024-02-09T10:31:39.230459
Apache-2.0
false
bdc269c3f40962ae622812360a68c3f3
import urllib.parse


class PackageIndex:
    """A package index plus convenient access to its well-known endpoints."""

    __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]

    def __init__(self, url: str, file_storage_domain: str) -> None:
        super().__init__()
        self.url = url
        self.netloc = urllib.parse.urlsplit(url).netloc
        # Pre-compute the standard endpoint URLs once at construction time.
        endpoints = [self._url_for_path(p) for p in ("simple", "pypi")]
        self.simple_url, self.pypi_url = endpoints

        # This is part of a temporary hack used to block installs of PyPI
        # packages which depend on external urls only necessary until PyPI can
        # block such packages themselves
        self.file_storage_domain = file_storage_domain

    def _url_for_path(self, path: str) -> str:
        # urljoin resolves the endpoint name relative to the index root URL.
        return urllib.parse.urljoin(self.url, path)


PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
TestPyPI = PackageIndex(
    "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
)
.venv\Lib\site-packages\pip\_internal\models\index.py
index.py
Python
1,030
0.95
0.107143
0.15
vue-tools
885
2025-03-08T16:57:20.906128
MIT
false
f67480db56cf588a2ee92844959bbabf
from typing import Any, Dict, Sequence

from pip._vendor.packaging.markers import default_environment

from pip import __version__
from pip._internal.req.req_install import InstallRequirement


class InstallationReport:
    """Build the JSON-serializable installation report (``pip --report``)."""

    def __init__(self, install_requirements: Sequence[InstallRequirement]):
        self._install_requirements = install_requirements

    @classmethod
    def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]:
        """Serialize one resolved requirement into the report's dict form."""
        assert ireq.download_info, f"No download_info for {ireq}"
        res = {
            # PEP 610 json for the download URL. download_info.archive_info.hashes may
            # be absent when the requirement was installed from the wheel cache
            # and the cache entry was populated by an older pip version that did not
            # record origin.json.
            "download_info": ireq.download_info.to_dict(),
            # is_direct is true if the requirement was a direct URL reference (which
            # includes editable requirements), and false if the requirement was
            # downloaded from a PEP 503 index or --find-links.
            "is_direct": ireq.is_direct,
            # is_yanked is true if the requirement was yanked from the index, but
            # was still selected by pip to conform to PEP 592.
            "is_yanked": ireq.link.is_yanked if ireq.link else False,
            # requested is true if the requirement was specified by the user (aka
            # top level requirement), and false if it was installed as a dependency of a
            # requirement. https://peps.python.org/pep-0376/#requested
            "requested": ireq.user_supplied,
            # PEP 566 json encoding for metadata
            # https://www.python.org/dev/peps/pep-0566/#json-compatible-metadata
            "metadata": ireq.get_dist().metadata_dict,
        }
        if ireq.user_supplied and ireq.extras:
            # For top level requirements, the list of requested extras, if any.
            res["requested_extras"] = sorted(ireq.extras)
        return res

    def to_dict(self) -> Dict[str, Any]:
        """Return the complete report as a JSON-serializable dict."""
        return {
            "version": "1",
            "pip_version": __version__,
            "install": [
                self._install_req_to_dict(ireq) for ireq in self._install_requirements
            ],
            # https://peps.python.org/pep-0508/#environment-markers
            # TODO: currently, the resolver uses the default environment to evaluate
            # environment markers, so that is what we report here. In the future, it
            # should also take into account options such as --python-version or
            # --platform, perhaps under the form of an environment_override field?
            # https://github.com/pypa/pip/issues/11198
            "environment": default_environment(),
        }
.venv\Lib\site-packages\pip\_internal\models\installation_report.py
installation_report.py
Python
2,818
0.95
0.285714
0.42
react-lib
496
2023-09-10T13:55:53.361051
Apache-2.0
false
09657ab688e36ae6641f732999ff5e92