Add files using upload-large-folder tool
Browse files- pythonProject/.venv/Lib/site-packages/PIL/__init__.py +87 -0
- pythonProject/.venv/Lib/site-packages/PIL/__main__.py +7 -0
- pythonProject/.venv/Lib/site-packages/PIL/__pycache__/AvifImagePlugin.cpython-310.pyc +0 -0
- pythonProject/.venv/Lib/site-packages/PIL/__pycache__/BdfFontFile.cpython-310.pyc +0 -0
- pythonProject/.venv/Lib/site-packages/PIL/__pycache__/BlpImagePlugin.cpython-310.pyc +0 -0
- pythonProject/.venv/Lib/site-packages/PIL/__pycache__/BmpImagePlugin.cpython-310.pyc +0 -0
- pythonProject/.venv/Lib/site-packages/PIL/__pycache__/BufrStubImagePlugin.cpython-310.pyc +0 -0
- pythonProject/.venv/Lib/site-packages/PIL/__pycache__/ContainerIO.cpython-310.pyc +0 -0
- pythonProject/.venv/Lib/site-packages/PIL/__pycache__/CurImagePlugin.cpython-310.pyc +0 -0
- pythonProject/.venv/Lib/site-packages/PIL/__pycache__/DcxImagePlugin.cpython-310.pyc +0 -0
- pythonProject/.venv/Lib/site-packages/PIL/__pycache__/DdsImagePlugin.cpython-310.pyc +0 -0
- pythonProject/.venv/Lib/site-packages/PIL/__pycache__/EpsImagePlugin.cpython-310.pyc +0 -0
- pythonProject/.venv/Lib/site-packages/PIL/__pycache__/ExifTags.cpython-310.pyc +0 -0
- pythonProject/.venv/Lib/site-packages/PIL/__pycache__/features.cpython-310.pyc +0 -0
- pythonProject/.venv/Lib/site-packages/pip/_internal/__init__.py +19 -0
- pythonProject/.venv/Lib/site-packages/pip/_internal/cli/autocompletion.py +171 -0
- pythonProject/.venv/Lib/site-packages/pip/_internal/main.py +12 -0
- pythonProject/.venv/Lib/site-packages/pip/_internal/pyproject.py +174 -0
- pythonProject/.venv/Lib/site-packages/pip/_internal/self_outdated_check.py +242 -0
- pythonProject/.venv/Lib/site-packages/pip/_internal/wheel_builder.py +382 -0
pythonProject/.venv/Lib/site-packages/PIL/__init__.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Pillow (Fork of the Python Imaging Library)
|
| 2 |
+
|
| 3 |
+
Pillow is the friendly PIL fork by Jeffrey A. Clark and contributors.
|
| 4 |
+
https://github.com/python-pillow/Pillow/
|
| 5 |
+
|
| 6 |
+
Pillow is forked from PIL 1.1.7.
|
| 7 |
+
|
| 8 |
+
PIL is the Python Imaging Library by Fredrik Lundh and contributors.
|
| 9 |
+
Copyright (c) 1999 by Secret Labs AB.
|
| 10 |
+
|
| 11 |
+
Use PIL.__version__ for this Pillow version.
|
| 12 |
+
|
| 13 |
+
;-)
|
| 14 |
+
"""
|
| 15 |
+
|
| 16 |
+
from __future__ import annotations
|
| 17 |
+
|
| 18 |
+
from . import _version
|
| 19 |
+
|
| 20 |
+
# VERSION was removed in Pillow 6.0.0.
|
| 21 |
+
# PILLOW_VERSION was removed in Pillow 9.0.0.
|
| 22 |
+
# Use __version__ instead.
|
| 23 |
+
__version__ = _version.__version__
|
| 24 |
+
del _version
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
_plugins = [
|
| 28 |
+
"AvifImagePlugin",
|
| 29 |
+
"BlpImagePlugin",
|
| 30 |
+
"BmpImagePlugin",
|
| 31 |
+
"BufrStubImagePlugin",
|
| 32 |
+
"CurImagePlugin",
|
| 33 |
+
"DcxImagePlugin",
|
| 34 |
+
"DdsImagePlugin",
|
| 35 |
+
"EpsImagePlugin",
|
| 36 |
+
"FitsImagePlugin",
|
| 37 |
+
"FliImagePlugin",
|
| 38 |
+
"FpxImagePlugin",
|
| 39 |
+
"FtexImagePlugin",
|
| 40 |
+
"GbrImagePlugin",
|
| 41 |
+
"GifImagePlugin",
|
| 42 |
+
"GribStubImagePlugin",
|
| 43 |
+
"Hdf5StubImagePlugin",
|
| 44 |
+
"IcnsImagePlugin",
|
| 45 |
+
"IcoImagePlugin",
|
| 46 |
+
"ImImagePlugin",
|
| 47 |
+
"ImtImagePlugin",
|
| 48 |
+
"IptcImagePlugin",
|
| 49 |
+
"JpegImagePlugin",
|
| 50 |
+
"Jpeg2KImagePlugin",
|
| 51 |
+
"McIdasImagePlugin",
|
| 52 |
+
"MicImagePlugin",
|
| 53 |
+
"MpegImagePlugin",
|
| 54 |
+
"MpoImagePlugin",
|
| 55 |
+
"MspImagePlugin",
|
| 56 |
+
"PalmImagePlugin",
|
| 57 |
+
"PcdImagePlugin",
|
| 58 |
+
"PcxImagePlugin",
|
| 59 |
+
"PdfImagePlugin",
|
| 60 |
+
"PixarImagePlugin",
|
| 61 |
+
"PngImagePlugin",
|
| 62 |
+
"PpmImagePlugin",
|
| 63 |
+
"PsdImagePlugin",
|
| 64 |
+
"QoiImagePlugin",
|
| 65 |
+
"SgiImagePlugin",
|
| 66 |
+
"SpiderImagePlugin",
|
| 67 |
+
"SunImagePlugin",
|
| 68 |
+
"TgaImagePlugin",
|
| 69 |
+
"TiffImagePlugin",
|
| 70 |
+
"WebPImagePlugin",
|
| 71 |
+
"WmfImagePlugin",
|
| 72 |
+
"XbmImagePlugin",
|
| 73 |
+
"XpmImagePlugin",
|
| 74 |
+
"XVThumbImagePlugin",
|
| 75 |
+
]
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class UnidentifiedImageError(OSError):
|
| 79 |
+
"""
|
| 80 |
+
Raised in :py:meth:`PIL.Image.open` if an image cannot be opened and identified.
|
| 81 |
+
|
| 82 |
+
If a PNG image raises this error, setting :data:`.ImageFile.LOAD_TRUNCATED_IMAGES`
|
| 83 |
+
to true may allow the image to be opened after all. The setting will ignore missing
|
| 84 |
+
data and checksum failures.
|
| 85 |
+
"""
|
| 86 |
+
|
| 87 |
+
pass
|
pythonProject/.venv/Lib/site-packages/PIL/__main__.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
|
| 5 |
+
from .features import pilinfo
|
| 6 |
+
|
| 7 |
+
pilinfo(supported_formats="--report" not in sys.argv)
|
pythonProject/.venv/Lib/site-packages/PIL/__pycache__/AvifImagePlugin.cpython-310.pyc
ADDED
|
Binary file (6.57 kB). View file
|
|
|
pythonProject/.venv/Lib/site-packages/PIL/__pycache__/BdfFontFile.cpython-310.pyc
ADDED
|
Binary file (2.54 kB). View file
|
|
|
pythonProject/.venv/Lib/site-packages/PIL/__pycache__/BlpImagePlugin.cpython-310.pyc
ADDED
|
Binary file (13.4 kB). View file
|
|
|
pythonProject/.venv/Lib/site-packages/PIL/__pycache__/BmpImagePlugin.cpython-310.pyc
ADDED
|
Binary file (9.87 kB). View file
|
|
|
pythonProject/.venv/Lib/site-packages/PIL/__pycache__/BufrStubImagePlugin.cpython-310.pyc
ADDED
|
Binary file (2 kB). View file
|
|
|
pythonProject/.venv/Lib/site-packages/PIL/__pycache__/ContainerIO.cpython-310.pyc
ADDED
|
Binary file (5.55 kB). View file
|
|
|
pythonProject/.venv/Lib/site-packages/PIL/__pycache__/CurImagePlugin.cpython-310.pyc
ADDED
|
Binary file (1.5 kB). View file
|
|
|
pythonProject/.venv/Lib/site-packages/PIL/__pycache__/DcxImagePlugin.cpython-310.pyc
ADDED
|
Binary file (1.85 kB). View file
|
|
|
pythonProject/.venv/Lib/site-packages/PIL/__pycache__/DdsImagePlugin.cpython-310.pyc
ADDED
|
Binary file (14.7 kB). View file
|
|
|
pythonProject/.venv/Lib/site-packages/PIL/__pycache__/EpsImagePlugin.cpython-310.pyc
ADDED
|
Binary file (9.22 kB). View file
|
|
|
pythonProject/.venv/Lib/site-packages/PIL/__pycache__/ExifTags.cpython-310.pyc
ADDED
|
Binary file (10.3 kB). View file
|
|
|
pythonProject/.venv/Lib/site-packages/PIL/__pycache__/features.cpython-310.pyc
ADDED
|
Binary file (10 kB). View file
|
|
|
pythonProject/.venv/Lib/site-packages/pip/_internal/__init__.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List, Optional
|
| 2 |
+
|
| 3 |
+
import pip._internal.utils.inject_securetransport # noqa
|
| 4 |
+
from pip._internal.utils import _log
|
| 5 |
+
|
| 6 |
+
# init_logging() must be called before any call to logging.getLogger()
|
| 7 |
+
# which happens at import of most modules.
|
| 8 |
+
_log.init_logging()
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def main(args: (Optional[List[str]]) = None) -> int:
|
| 12 |
+
"""This is preserved for old console scripts that may still be referencing
|
| 13 |
+
it.
|
| 14 |
+
|
| 15 |
+
For additional details, see https://github.com/pypa/pip/issues/7498.
|
| 16 |
+
"""
|
| 17 |
+
from pip._internal.utils.entrypoints import _wrapper
|
| 18 |
+
|
| 19 |
+
return _wrapper(args)
|
pythonProject/.venv/Lib/site-packages/pip/_internal/cli/autocompletion.py
ADDED
|
@@ -0,0 +1,171 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Logic that powers autocompletion installed by ``pip completion``.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import optparse
|
| 5 |
+
import os
|
| 6 |
+
import sys
|
| 7 |
+
from itertools import chain
|
| 8 |
+
from typing import Any, Iterable, List, Optional
|
| 9 |
+
|
| 10 |
+
from pip._internal.cli.main_parser import create_main_parser
|
| 11 |
+
from pip._internal.commands import commands_dict, create_command
|
| 12 |
+
from pip._internal.metadata import get_default_environment
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def autocomplete() -> None:
|
| 16 |
+
"""Entry Point for completion of main and subcommand options."""
|
| 17 |
+
# Don't complete if user hasn't sourced bash_completion file.
|
| 18 |
+
if "PIP_AUTO_COMPLETE" not in os.environ:
|
| 19 |
+
return
|
| 20 |
+
cwords = os.environ["COMP_WORDS"].split()[1:]
|
| 21 |
+
cword = int(os.environ["COMP_CWORD"])
|
| 22 |
+
try:
|
| 23 |
+
current = cwords[cword - 1]
|
| 24 |
+
except IndexError:
|
| 25 |
+
current = ""
|
| 26 |
+
|
| 27 |
+
parser = create_main_parser()
|
| 28 |
+
subcommands = list(commands_dict)
|
| 29 |
+
options = []
|
| 30 |
+
|
| 31 |
+
# subcommand
|
| 32 |
+
subcommand_name: Optional[str] = None
|
| 33 |
+
for word in cwords:
|
| 34 |
+
if word in subcommands:
|
| 35 |
+
subcommand_name = word
|
| 36 |
+
break
|
| 37 |
+
# subcommand options
|
| 38 |
+
if subcommand_name is not None:
|
| 39 |
+
# special case: 'help' subcommand has no options
|
| 40 |
+
if subcommand_name == "help":
|
| 41 |
+
sys.exit(1)
|
| 42 |
+
# special case: list locally installed dists for show and uninstall
|
| 43 |
+
should_list_installed = not current.startswith("-") and subcommand_name in [
|
| 44 |
+
"show",
|
| 45 |
+
"uninstall",
|
| 46 |
+
]
|
| 47 |
+
if should_list_installed:
|
| 48 |
+
env = get_default_environment()
|
| 49 |
+
lc = current.lower()
|
| 50 |
+
installed = [
|
| 51 |
+
dist.canonical_name
|
| 52 |
+
for dist in env.iter_installed_distributions(local_only=True)
|
| 53 |
+
if dist.canonical_name.startswith(lc)
|
| 54 |
+
and dist.canonical_name not in cwords[1:]
|
| 55 |
+
]
|
| 56 |
+
# if there are no dists installed, fall back to option completion
|
| 57 |
+
if installed:
|
| 58 |
+
for dist in installed:
|
| 59 |
+
print(dist)
|
| 60 |
+
sys.exit(1)
|
| 61 |
+
|
| 62 |
+
should_list_installables = (
|
| 63 |
+
not current.startswith("-") and subcommand_name == "install"
|
| 64 |
+
)
|
| 65 |
+
if should_list_installables:
|
| 66 |
+
for path in auto_complete_paths(current, "path"):
|
| 67 |
+
print(path)
|
| 68 |
+
sys.exit(1)
|
| 69 |
+
|
| 70 |
+
subcommand = create_command(subcommand_name)
|
| 71 |
+
|
| 72 |
+
for opt in subcommand.parser.option_list_all:
|
| 73 |
+
if opt.help != optparse.SUPPRESS_HELP:
|
| 74 |
+
for opt_str in opt._long_opts + opt._short_opts:
|
| 75 |
+
options.append((opt_str, opt.nargs))
|
| 76 |
+
|
| 77 |
+
# filter out previously specified options from available options
|
| 78 |
+
prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
|
| 79 |
+
options = [(x, v) for (x, v) in options if x not in prev_opts]
|
| 80 |
+
# filter options by current input
|
| 81 |
+
options = [(k, v) for k, v in options if k.startswith(current)]
|
| 82 |
+
# get completion type given cwords and available subcommand options
|
| 83 |
+
completion_type = get_path_completion_type(
|
| 84 |
+
cwords,
|
| 85 |
+
cword,
|
| 86 |
+
subcommand.parser.option_list_all,
|
| 87 |
+
)
|
| 88 |
+
# get completion files and directories if ``completion_type`` is
|
| 89 |
+
# ``<file>``, ``<dir>`` or ``<path>``
|
| 90 |
+
if completion_type:
|
| 91 |
+
paths = auto_complete_paths(current, completion_type)
|
| 92 |
+
options = [(path, 0) for path in paths]
|
| 93 |
+
for option in options:
|
| 94 |
+
opt_label = option[0]
|
| 95 |
+
# append '=' to options which require args
|
| 96 |
+
if option[1] and option[0][:2] == "--":
|
| 97 |
+
opt_label += "="
|
| 98 |
+
print(opt_label)
|
| 99 |
+
else:
|
| 100 |
+
# show main parser options only when necessary
|
| 101 |
+
|
| 102 |
+
opts = [i.option_list for i in parser.option_groups]
|
| 103 |
+
opts.append(parser.option_list)
|
| 104 |
+
flattened_opts = chain.from_iterable(opts)
|
| 105 |
+
if current.startswith("-"):
|
| 106 |
+
for opt in flattened_opts:
|
| 107 |
+
if opt.help != optparse.SUPPRESS_HELP:
|
| 108 |
+
subcommands += opt._long_opts + opt._short_opts
|
| 109 |
+
else:
|
| 110 |
+
# get completion type given cwords and all available options
|
| 111 |
+
completion_type = get_path_completion_type(cwords, cword, flattened_opts)
|
| 112 |
+
if completion_type:
|
| 113 |
+
subcommands = list(auto_complete_paths(current, completion_type))
|
| 114 |
+
|
| 115 |
+
print(" ".join([x for x in subcommands if x.startswith(current)]))
|
| 116 |
+
sys.exit(1)
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def get_path_completion_type(
|
| 120 |
+
cwords: List[str], cword: int, opts: Iterable[Any]
|
| 121 |
+
) -> Optional[str]:
|
| 122 |
+
"""Get the type of path completion (``file``, ``dir``, ``path`` or None)
|
| 123 |
+
|
| 124 |
+
:param cwords: same as the environmental variable ``COMP_WORDS``
|
| 125 |
+
:param cword: same as the environmental variable ``COMP_CWORD``
|
| 126 |
+
:param opts: The available options to check
|
| 127 |
+
:return: path completion type (``file``, ``dir``, ``path`` or None)
|
| 128 |
+
"""
|
| 129 |
+
if cword < 2 or not cwords[cword - 2].startswith("-"):
|
| 130 |
+
return None
|
| 131 |
+
for opt in opts:
|
| 132 |
+
if opt.help == optparse.SUPPRESS_HELP:
|
| 133 |
+
continue
|
| 134 |
+
for o in str(opt).split("/"):
|
| 135 |
+
if cwords[cword - 2].split("=")[0] == o:
|
| 136 |
+
if not opt.metavar or any(
|
| 137 |
+
x in ("path", "file", "dir") for x in opt.metavar.split("/")
|
| 138 |
+
):
|
| 139 |
+
return opt.metavar
|
| 140 |
+
return None
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
|
| 144 |
+
"""If ``completion_type`` is ``file`` or ``path``, list all regular files
|
| 145 |
+
and directories starting with ``current``; otherwise only list directories
|
| 146 |
+
starting with ``current``.
|
| 147 |
+
|
| 148 |
+
:param current: The word to be completed
|
| 149 |
+
:param completion_type: path completion type(``file``, ``path`` or ``dir``)
|
| 150 |
+
:return: A generator of regular files and/or directories
|
| 151 |
+
"""
|
| 152 |
+
directory, filename = os.path.split(current)
|
| 153 |
+
current_path = os.path.abspath(directory)
|
| 154 |
+
# Don't complete paths if they can't be accessed
|
| 155 |
+
if not os.access(current_path, os.R_OK):
|
| 156 |
+
return
|
| 157 |
+
filename = os.path.normcase(filename)
|
| 158 |
+
# list all files that start with ``filename``
|
| 159 |
+
file_list = (
|
| 160 |
+
x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
|
| 161 |
+
)
|
| 162 |
+
for f in file_list:
|
| 163 |
+
opt = os.path.join(current_path, f)
|
| 164 |
+
comp_file = os.path.normcase(os.path.join(directory, f))
|
| 165 |
+
# complete regular files when there is not ``<dir>`` after option
|
| 166 |
+
# complete directories when there is ``<file>``, ``<path>`` or
|
| 167 |
+
# ``<dir>``after option
|
| 168 |
+
if completion_type != "dir" and os.path.isfile(opt):
|
| 169 |
+
yield comp_file
|
| 170 |
+
elif os.path.isdir(opt):
|
| 171 |
+
yield os.path.join(comp_file, "")
|
pythonProject/.venv/Lib/site-packages/pip/_internal/main.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List, Optional
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def main(args: Optional[List[str]] = None) -> int:
|
| 5 |
+
"""This is preserved for old console scripts that may still be referencing
|
| 6 |
+
it.
|
| 7 |
+
|
| 8 |
+
For additional details, see https://github.com/pypa/pip/issues/7498.
|
| 9 |
+
"""
|
| 10 |
+
from pip._internal.utils.entrypoints import _wrapper
|
| 11 |
+
|
| 12 |
+
return _wrapper(args)
|
pythonProject/.venv/Lib/site-packages/pip/_internal/pyproject.py
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib.util
|
| 2 |
+
import os
|
| 3 |
+
from collections import namedtuple
|
| 4 |
+
from typing import Any, List, Optional
|
| 5 |
+
|
| 6 |
+
from pip._vendor import tomli
|
| 7 |
+
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
|
| 8 |
+
|
| 9 |
+
from pip._internal.exceptions import (
|
| 10 |
+
InstallationError,
|
| 11 |
+
InvalidPyProjectBuildRequires,
|
| 12 |
+
MissingPyProjectBuildRequires,
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def _is_list_of_str(obj: Any) -> bool:
|
| 17 |
+
return isinstance(obj, list) and all(isinstance(item, str) for item in obj)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def make_pyproject_path(unpacked_source_directory: str) -> str:
|
| 21 |
+
return os.path.join(unpacked_source_directory, "pyproject.toml")
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
BuildSystemDetails = namedtuple(
|
| 25 |
+
"BuildSystemDetails", ["requires", "backend", "check", "backend_path"]
|
| 26 |
+
)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def load_pyproject_toml(
|
| 30 |
+
use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str
|
| 31 |
+
) -> Optional[BuildSystemDetails]:
|
| 32 |
+
"""Load the pyproject.toml file.
|
| 33 |
+
|
| 34 |
+
Parameters:
|
| 35 |
+
use_pep517 - Has the user requested PEP 517 processing? None
|
| 36 |
+
means the user hasn't explicitly specified.
|
| 37 |
+
pyproject_toml - Location of the project's pyproject.toml file
|
| 38 |
+
setup_py - Location of the project's setup.py file
|
| 39 |
+
req_name - The name of the requirement we're processing (for
|
| 40 |
+
error reporting)
|
| 41 |
+
|
| 42 |
+
Returns:
|
| 43 |
+
None if we should use the legacy code path, otherwise a tuple
|
| 44 |
+
(
|
| 45 |
+
requirements from pyproject.toml,
|
| 46 |
+
name of PEP 517 backend,
|
| 47 |
+
requirements we should check are installed after setting
|
| 48 |
+
up the build environment
|
| 49 |
+
directory paths to import the backend from (backend-path),
|
| 50 |
+
relative to the project root.
|
| 51 |
+
)
|
| 52 |
+
"""
|
| 53 |
+
has_pyproject = os.path.isfile(pyproject_toml)
|
| 54 |
+
has_setup = os.path.isfile(setup_py)
|
| 55 |
+
|
| 56 |
+
if not has_pyproject and not has_setup:
|
| 57 |
+
raise InstallationError(
|
| 58 |
+
f"{req_name} does not appear to be a Python project: "
|
| 59 |
+
f"neither 'setup.py' nor 'pyproject.toml' found."
|
| 60 |
+
)
|
| 61 |
+
|
| 62 |
+
if has_pyproject:
|
| 63 |
+
with open(pyproject_toml, encoding="utf-8") as f:
|
| 64 |
+
pp_toml = tomli.loads(f.read())
|
| 65 |
+
build_system = pp_toml.get("build-system")
|
| 66 |
+
else:
|
| 67 |
+
build_system = None
|
| 68 |
+
|
| 69 |
+
# The following cases must use PEP 517
|
| 70 |
+
# We check for use_pep517 being non-None and falsey because that means
|
| 71 |
+
# the user explicitly requested --no-use-pep517. The value 0 as
|
| 72 |
+
# opposed to False can occur when the value is provided via an
|
| 73 |
+
# environment variable or config file option (due to the quirk of
|
| 74 |
+
# strtobool() returning an integer in pip's configuration code).
|
| 75 |
+
if has_pyproject and not has_setup:
|
| 76 |
+
if use_pep517 is not None and not use_pep517:
|
| 77 |
+
raise InstallationError(
|
| 78 |
+
"Disabling PEP 517 processing is invalid: "
|
| 79 |
+
"project does not have a setup.py"
|
| 80 |
+
)
|
| 81 |
+
use_pep517 = True
|
| 82 |
+
elif build_system and "build-backend" in build_system:
|
| 83 |
+
if use_pep517 is not None and not use_pep517:
|
| 84 |
+
raise InstallationError(
|
| 85 |
+
"Disabling PEP 517 processing is invalid: "
|
| 86 |
+
"project specifies a build backend of {} "
|
| 87 |
+
"in pyproject.toml".format(build_system["build-backend"])
|
| 88 |
+
)
|
| 89 |
+
use_pep517 = True
|
| 90 |
+
|
| 91 |
+
# If we haven't worked out whether to use PEP 517 yet,
|
| 92 |
+
# and the user hasn't explicitly stated a preference,
|
| 93 |
+
# we do so if the project has a pyproject.toml file
|
| 94 |
+
# or if we cannot import setuptools.
|
| 95 |
+
|
| 96 |
+
# We fallback to PEP 517 when without setuptools,
|
| 97 |
+
# so setuptools can be installed as a default build backend.
|
| 98 |
+
# For more info see:
|
| 99 |
+
# https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9
|
| 100 |
+
elif use_pep517 is None:
|
| 101 |
+
use_pep517 = has_pyproject or not importlib.util.find_spec("setuptools")
|
| 102 |
+
|
| 103 |
+
# At this point, we know whether we're going to use PEP 517.
|
| 104 |
+
assert use_pep517 is not None
|
| 105 |
+
|
| 106 |
+
# If we're using the legacy code path, there is nothing further
|
| 107 |
+
# for us to do here.
|
| 108 |
+
if not use_pep517:
|
| 109 |
+
return None
|
| 110 |
+
|
| 111 |
+
if build_system is None:
|
| 112 |
+
# Either the user has a pyproject.toml with no build-system
|
| 113 |
+
# section, or the user has no pyproject.toml, but has opted in
|
| 114 |
+
# explicitly via --use-pep517.
|
| 115 |
+
# In the absence of any explicit backend specification, we
|
| 116 |
+
# assume the setuptools backend that most closely emulates the
|
| 117 |
+
# traditional direct setup.py execution, and require wheel and
|
| 118 |
+
# a version of setuptools that supports that backend.
|
| 119 |
+
|
| 120 |
+
build_system = {
|
| 121 |
+
"requires": ["setuptools>=40.8.0", "wheel"],
|
| 122 |
+
"build-backend": "setuptools.build_meta:__legacy__",
|
| 123 |
+
}
|
| 124 |
+
|
| 125 |
+
# If we're using PEP 517, we have build system information (either
|
| 126 |
+
# from pyproject.toml, or defaulted by the code above).
|
| 127 |
+
# Note that at this point, we do not know if the user has actually
|
| 128 |
+
# specified a backend, though.
|
| 129 |
+
assert build_system is not None
|
| 130 |
+
|
| 131 |
+
# Ensure that the build-system section in pyproject.toml conforms
|
| 132 |
+
# to PEP 518.
|
| 133 |
+
|
| 134 |
+
# Specifying the build-system table but not the requires key is invalid
|
| 135 |
+
if "requires" not in build_system:
|
| 136 |
+
raise MissingPyProjectBuildRequires(package=req_name)
|
| 137 |
+
|
| 138 |
+
# Error out if requires is not a list of strings
|
| 139 |
+
requires = build_system["requires"]
|
| 140 |
+
if not _is_list_of_str(requires):
|
| 141 |
+
raise InvalidPyProjectBuildRequires(
|
| 142 |
+
package=req_name,
|
| 143 |
+
reason="It is not a list of strings.",
|
| 144 |
+
)
|
| 145 |
+
|
| 146 |
+
# Each requirement must be valid as per PEP 508
|
| 147 |
+
for requirement in requires:
|
| 148 |
+
try:
|
| 149 |
+
Requirement(requirement)
|
| 150 |
+
except InvalidRequirement as error:
|
| 151 |
+
raise InvalidPyProjectBuildRequires(
|
| 152 |
+
package=req_name,
|
| 153 |
+
reason=f"It contains an invalid requirement: {requirement!r}",
|
| 154 |
+
) from error
|
| 155 |
+
|
| 156 |
+
backend = build_system.get("build-backend")
|
| 157 |
+
backend_path = build_system.get("backend-path", [])
|
| 158 |
+
check: List[str] = []
|
| 159 |
+
if backend is None:
|
| 160 |
+
# If the user didn't specify a backend, we assume they want to use
|
| 161 |
+
# the setuptools backend. But we can't be sure they have included
|
| 162 |
+
# a version of setuptools which supplies the backend. So we
|
| 163 |
+
# make a note to check that this requirement is present once
|
| 164 |
+
# we have set up the environment.
|
| 165 |
+
# This is quite a lot of work to check for a very specific case. But
|
| 166 |
+
# the problem is, that case is potentially quite common - projects that
|
| 167 |
+
# adopted PEP 518 early for the ability to specify requirements to
|
| 168 |
+
# execute setup.py, but never considered needing to mention the build
|
| 169 |
+
# tools themselves. The original PEP 518 code had a similar check (but
|
| 170 |
+
# implemented in a different way).
|
| 171 |
+
backend = "setuptools.build_meta:__legacy__"
|
| 172 |
+
check = ["setuptools>=40.8.0"]
|
| 173 |
+
|
| 174 |
+
return BuildSystemDetails(requires, backend, check, backend_path)
|
pythonProject/.venv/Lib/site-packages/pip/_internal/self_outdated_check.py
ADDED
|
@@ -0,0 +1,242 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import datetime
|
| 2 |
+
import functools
|
| 3 |
+
import hashlib
|
| 4 |
+
import json
|
| 5 |
+
import logging
|
| 6 |
+
import optparse
|
| 7 |
+
import os.path
|
| 8 |
+
import sys
|
| 9 |
+
from dataclasses import dataclass
|
| 10 |
+
from typing import Any, Callable, Dict, Optional
|
| 11 |
+
|
| 12 |
+
from pip._vendor.packaging.version import parse as parse_version
|
| 13 |
+
from pip._vendor.rich.console import Group
|
| 14 |
+
from pip._vendor.rich.markup import escape
|
| 15 |
+
from pip._vendor.rich.text import Text
|
| 16 |
+
|
| 17 |
+
from pip._internal.index.collector import LinkCollector
|
| 18 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 19 |
+
from pip._internal.metadata import get_default_environment
|
| 20 |
+
from pip._internal.metadata.base import DistributionVersion
|
| 21 |
+
from pip._internal.models.selection_prefs import SelectionPreferences
|
| 22 |
+
from pip._internal.network.session import PipSession
|
| 23 |
+
from pip._internal.utils.compat import WINDOWS
|
| 24 |
+
from pip._internal.utils.entrypoints import (
|
| 25 |
+
get_best_invocation_for_this_pip,
|
| 26 |
+
get_best_invocation_for_this_python,
|
| 27 |
+
)
|
| 28 |
+
from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace
|
| 29 |
+
from pip._internal.utils.misc import ensure_dir
|
| 30 |
+
|
| 31 |
+
_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ"
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
logger = logging.getLogger(__name__)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def _get_statefile_name(key: str) -> str:
|
| 38 |
+
key_bytes = key.encode()
|
| 39 |
+
name = hashlib.sha224(key_bytes).hexdigest()
|
| 40 |
+
return name
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class SelfCheckState:
|
| 44 |
+
def __init__(self, cache_dir: str) -> None:
|
| 45 |
+
self._state: Dict[str, Any] = {}
|
| 46 |
+
self._statefile_path = None
|
| 47 |
+
|
| 48 |
+
# Try to load the existing state
|
| 49 |
+
if cache_dir:
|
| 50 |
+
self._statefile_path = os.path.join(
|
| 51 |
+
cache_dir, "selfcheck", _get_statefile_name(self.key)
|
| 52 |
+
)
|
| 53 |
+
try:
|
| 54 |
+
with open(self._statefile_path, encoding="utf-8") as statefile:
|
| 55 |
+
self._state = json.load(statefile)
|
| 56 |
+
except (OSError, ValueError, KeyError):
|
| 57 |
+
# Explicitly suppressing exceptions, since we don't want to
|
| 58 |
+
# error out if the cache file is invalid.
|
| 59 |
+
pass
|
| 60 |
+
|
| 61 |
+
@property
|
| 62 |
+
def key(self) -> str:
|
| 63 |
+
return sys.prefix
|
| 64 |
+
|
| 65 |
+
def get(self, current_time: datetime.datetime) -> Optional[str]:
|
| 66 |
+
"""Check if we have a not-outdated version loaded already."""
|
| 67 |
+
if not self._state:
|
| 68 |
+
return None
|
| 69 |
+
|
| 70 |
+
if "last_check" not in self._state:
|
| 71 |
+
return None
|
| 72 |
+
|
| 73 |
+
if "pypi_version" not in self._state:
|
| 74 |
+
return None
|
| 75 |
+
|
| 76 |
+
seven_days_in_seconds = 7 * 24 * 60 * 60
|
| 77 |
+
|
| 78 |
+
# Determine if we need to refresh the state
|
| 79 |
+
last_check = datetime.datetime.strptime(self._state["last_check"], _DATE_FMT)
|
| 80 |
+
seconds_since_last_check = (current_time - last_check).total_seconds()
|
| 81 |
+
if seconds_since_last_check > seven_days_in_seconds:
|
| 82 |
+
return None
|
| 83 |
+
|
| 84 |
+
return self._state["pypi_version"]
|
| 85 |
+
|
| 86 |
+
def set(self, pypi_version: str, current_time: datetime.datetime) -> None:
|
| 87 |
+
# If we do not have a path to cache in, don't bother saving.
|
| 88 |
+
if not self._statefile_path:
|
| 89 |
+
return
|
| 90 |
+
|
| 91 |
+
# Check to make sure that we own the directory
|
| 92 |
+
if not check_path_owner(os.path.dirname(self._statefile_path)):
|
| 93 |
+
return
|
| 94 |
+
|
| 95 |
+
# Now that we've ensured the directory is owned by this user, we'll go
|
| 96 |
+
# ahead and make sure that all our directories are created.
|
| 97 |
+
ensure_dir(os.path.dirname(self._statefile_path))
|
| 98 |
+
|
| 99 |
+
state = {
|
| 100 |
+
# Include the key so it's easy to tell which pip wrote the
|
| 101 |
+
# file.
|
| 102 |
+
"key": self.key,
|
| 103 |
+
"last_check": current_time.strftime(_DATE_FMT),
|
| 104 |
+
"pypi_version": pypi_version,
|
| 105 |
+
}
|
| 106 |
+
|
| 107 |
+
text = json.dumps(state, sort_keys=True, separators=(",", ":"))
|
| 108 |
+
|
| 109 |
+
with adjacent_tmp_file(self._statefile_path) as f:
|
| 110 |
+
f.write(text.encode())
|
| 111 |
+
|
| 112 |
+
try:
|
| 113 |
+
# Since we have a prefix-specific state file, we can just
|
| 114 |
+
# overwrite whatever is there, no need to check.
|
| 115 |
+
replace(f.name, self._statefile_path)
|
| 116 |
+
except OSError:
|
| 117 |
+
# Best effort.
|
| 118 |
+
pass
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
@dataclass
class UpgradePrompt:
    """Rich-renderable notice telling the user a newer pip is available."""

    old: str
    new: str

    def __rich__(self) -> Group:
        # On Windows, upgrading in place can fail when invoking the pip
        # executable directly, so recommend the ``python -m pip`` form there.
        if WINDOWS:
            pip_cmd = f"{get_best_invocation_for_this_python()} -m pip"
        else:
            pip_cmd = get_best_invocation_for_this_pip()

        notice = "[bold][[reset][blue]notice[reset][bold]][reset]"
        release_line = Text.from_markup(
            f"{notice} A new release of pip is available: "
            f"[red]{self.old}[reset] -> [green]{self.new}[reset]"
        )
        update_line = Text.from_markup(
            f"{notice} To update, run: "
            f"[green]{escape(pip_cmd)} install --upgrade pip"
        )
        # Leading blank Text() separates the notice from preceding output.
        return Group(Text(), release_line, update_line)
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def was_installed_by_pip(pkg: str) -> bool:
    """Return True if *pkg* was installed by pip itself.

    This is used not to display the upgrade message when pip is in fact
    installed by system package manager, such as dnf on Fedora.
    """
    distribution = get_default_environment().get_distribution(pkg)
    if distribution is None:
        return False
    return distribution.installer == "pip"
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
def _get_current_remote_pip_version(
    session: PipSession, options: optparse.Values
) -> Optional[str]:
    """Query the configured index for the newest pip version.

    Returns the version string of the best candidate, or None when no
    candidate is found.
    """
    # Lets use PackageFinder to see what the latest pip version is
    link_collector = LinkCollector.create(
        session,
        options=options,
        suppress_no_index=True,
    )

    # Pass allow_yanked=False so we don't suggest upgrading to a
    # yanked version.
    finder = PackageFinder.create(
        link_collector=link_collector,
        selection_prefs=SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=False,  # Explicitly set to False
        ),
    )

    best = finder.find_best_candidate("pip").best_candidate
    return None if best is None else str(best.version)
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
def _self_version_check_logic(
    *,
    state: SelfCheckState,
    current_time: datetime.datetime,
    local_version: DistributionVersion,
    get_remote_version: Callable[[], Optional[str]],
) -> Optional[UpgradePrompt]:
    """Decide whether an upgrade prompt should be shown.

    Returns an UpgradePrompt when a strictly newer pip release exists and
    pip itself was installed by pip; otherwise None.
    """
    remote_version_str = state.get(current_time)
    if remote_version_str is None:
        # Cache miss (or stale): hit the network and refresh the cache.
        remote_version_str = get_remote_version()
        if remote_version_str is None:
            logger.debug("No remote pip version found")
            return None
        state.set(remote_version_str, current_time)

    remote_version = parse_version(remote_version_str)
    logger.debug("Remote version of pip: %s", remote_version)
    logger.debug("Local version of pip: %s", local_version)

    pip_installed_by_pip = was_installed_by_pip("pip")
    logger.debug("Was pip installed by pip? %s", pip_installed_by_pip)
    if not pip_installed_by_pip:
        return None  # Only suggest upgrade if pip is installed by pip.

    # Also require differing base versions so that e.g. a local build of
    # the same release does not trigger a prompt.
    if (
        local_version < remote_version
        and local_version.base_version != remote_version.base_version
    ):
        return UpgradePrompt(old=str(local_version), new=remote_version_str)

    return None
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
def pip_self_version_check(session: PipSession, options: optparse.Values) -> None:
    """Check for an update for pip.

    Limit the frequency of checks to once per week. State is stored either in
    the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
    of the pip script path.
    """
    installed_dist = get_default_environment().get_distribution("pip")
    if not installed_dist:
        return

    try:
        fetch_remote = functools.partial(
            _get_current_remote_pip_version, session, options
        )
        upgrade_prompt = _self_version_check_logic(
            state=SelfCheckState(cache_dir=options.cache_dir),
            current_time=datetime.datetime.utcnow(),
            local_version=installed_dist.version,
            get_remote_version=fetch_remote,
        )
        if upgrade_prompt is not None:
            logger.warning("[present-rich] %s", upgrade_prompt)
    except Exception:
        # The self-check must never break the pip command the user ran.
        logger.warning("There was an error checking the latest version of pip.")
        logger.debug("See below for error", exc_info=True)
|
pythonProject/.venv/Lib/site-packages/pip/_internal/wheel_builder.py
ADDED
|
@@ -0,0 +1,382 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Orchestrator for building wheels from InstallRequirements.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
import os.path
|
| 6 |
+
import re
|
| 7 |
+
import shutil
|
| 8 |
+
from typing import Callable, Iterable, List, Optional, Tuple
|
| 9 |
+
|
| 10 |
+
from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version
|
| 11 |
+
from pip._vendor.packaging.version import InvalidVersion, Version
|
| 12 |
+
|
| 13 |
+
from pip._internal.cache import WheelCache
|
| 14 |
+
from pip._internal.exceptions import InvalidWheelFilename, UnsupportedWheel
|
| 15 |
+
from pip._internal.metadata import FilesystemWheel, get_wheel_distribution
|
| 16 |
+
from pip._internal.models.link import Link
|
| 17 |
+
from pip._internal.models.wheel import Wheel
|
| 18 |
+
from pip._internal.operations.build.wheel import build_wheel_pep517
|
| 19 |
+
from pip._internal.operations.build.wheel_editable import build_wheel_editable
|
| 20 |
+
from pip._internal.operations.build.wheel_legacy import build_wheel_legacy
|
| 21 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 22 |
+
from pip._internal.utils.deprecation import (
|
| 23 |
+
LegacyInstallReasonMissingWheelPackage,
|
| 24 |
+
LegacyInstallReasonNoBinaryForcesSetuptoolsInstall,
|
| 25 |
+
)
|
| 26 |
+
from pip._internal.utils.logging import indent_log
|
| 27 |
+
from pip._internal.utils.misc import ensure_dir, hash_file, is_wheel_installed
|
| 28 |
+
from pip._internal.utils.setuptools_build import make_setuptools_clean_args
|
| 29 |
+
from pip._internal.utils.subprocess import call_subprocess
|
| 30 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
| 31 |
+
from pip._internal.utils.urls import path_to_url
|
| 32 |
+
from pip._internal.vcs import vcs
|
| 33 |
+
|
| 34 |
+
logger = logging.getLogger(__name__)

# Matches setuptools egg-info style "<name>-<version>" fragments,
# e.g. "foo-2.1" (case-insensitive); see _contains_egg_info below.
_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE)

# Predicate deciding whether `setup.py bdist_wheel` may be used for a req.
BdistWheelAllowedPredicate = Callable[[InstallRequirement], bool]
# (requirements that built successfully, requirements that failed to build)
BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]]
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def _contains_egg_info(s: str) -> bool:
    """Determine whether the string looks like an egg_info.

    :param s: The string to parse. E.g. foo-2.1
    """
    # Any "<name>-<version>" fragment anywhere in the string counts.
    return _egg_info_re.search(s) is not None
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def _should_build(
    req: InstallRequirement,
    need_wheel: bool,
    check_bdist_wheel: Optional[BdistWheelAllowedPredicate] = None,
) -> bool:
    """Return whether an InstallRequirement should be built into a wheel.

    :param req: the requirement under consideration.
    :param need_wheel: True for ``pip wheel`` (a wheel artifact is the goal),
        False for ``pip install``.
    :param check_bdist_wheel: predicate consulted only on the install path
        for legacy (non-PEP 517, non-editable) requirements; must be
        provided in that case.
    """
    if req.constraint:
        # never build requirements that are merely constraints
        return False
    if req.is_wheel:
        if need_wheel:
            logger.info(
                "Skipping %s, due to already being wheel.",
                req.name,
            )
        return False

    if need_wheel:
        # i.e. pip wheel, not pip install
        return True

    # From this point, this concerns the pip install command only
    # (need_wheel=False).

    if not req.source_dir:
        return False

    if req.editable:
        # we only build PEP 660 editable requirements
        return req.supports_pyproject_editable()

    if req.use_pep517:
        return True

    # Legacy setup.py path below; the caller must supply the predicate.
    assert check_bdist_wheel is not None
    if not check_bdist_wheel(req):
        # /!\ When we change this to unconditionally return True, we must also remove
        # support for `--install-option`. Indeed, `--install-option` implies
        # `--no-binary` so we can return False here and run `setup.py install`.
        # `--global-option` and `--build-option` can remain until we drop support for
        # building with `setup.py bdist_wheel`.
        req.legacy_install_reason = LegacyInstallReasonNoBinaryForcesSetuptoolsInstall
        return False

    if not is_wheel_installed():
        # we don't build legacy requirements if wheel is not installed
        req.legacy_install_reason = LegacyInstallReasonMissingWheelPackage
        return False

    return True
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def should_build_for_wheel_command(
    req: InstallRequirement,
) -> bool:
    """Tell whether *req* should be built when running ``pip wheel``."""
    # ``pip wheel`` always wants a wheel artifact, so need_wheel=True.
    return _should_build(req, need_wheel=True)
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def should_build_for_install_command(
    req: InstallRequirement,
    check_bdist_wheel_allowed: BdistWheelAllowedPredicate,
) -> bool:
    """Tell whether *req* should be built when running ``pip install``."""
    return _should_build(
        req,
        need_wheel=False,
        check_bdist_wheel=check_bdist_wheel_allowed,
    )
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
def _should_cache(
    req: InstallRequirement,
) -> Optional[bool]:
    """
    Return whether a built InstallRequirement can be stored in the persistent
    wheel cache, assuming the wheel cache is available, and _should_build()
    has determined a wheel needs to be built.

    Only requirements whose rebuild would be reproducible are cached: an
    immutable VCS revision, or a link whose filename pins name and version.
    """
    if req.editable or not req.source_dir:
        # never cache editable requirements
        return False

    if req.link and req.link.is_vcs:
        # VCS checkout. Do not cache
        # unless it points to an immutable commit hash.
        assert not req.editable
        assert req.source_dir
        vcs_backend = vcs.get_backend_for_scheme(req.link.scheme)
        assert vcs_backend
        if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir):
            return True
        return False

    assert req.link
    base, ext = req.link.splitext()
    # A "<name>-<version>" filename means rebuilding yields the same wheel.
    if _contains_egg_info(base):
        return True

    # Otherwise, do not cache.
    return False
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
def _get_cache_dir(
    req: InstallRequirement,
    wheel_cache: WheelCache,
) -> str:
    """Return the persistent or temporary cache directory where the built
    wheel need to be stored.
    """
    assert req.link
    # Cacheable requirements go into the persistent cache; everything else
    # gets an ephemeral (per-invocation) directory.
    if wheel_cache.cache_dir and _should_cache(req):
        return wheel_cache.get_path_for_link(req.link)
    return wheel_cache.get_ephem_path_for_link(req.link)
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
def _verify_one(req: InstallRequirement, wheel_path: str) -> None:
    """Validate a freshly built wheel against its requirement.

    Checks that the wheel filename's name and version match the
    requirement/metadata, and that the metadata declares a usable
    Metadata-Version (with a PEP 440 version when Metadata >= 1.2).

    :raises InvalidWheelFilename: on name or version mismatch.
    :raises UnsupportedWheel: on missing/invalid metadata version info.
    """
    canonical_name = canonicalize_name(req.name or "")
    w = Wheel(os.path.basename(wheel_path))
    if canonicalize_name(w.name) != canonical_name:
        raise InvalidWheelFilename(
            "Wheel has unexpected file name: expected {!r}, "
            "got {!r}".format(canonical_name, w.name),
        )
    dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name)
    dist_verstr = str(dist.version)
    # Compare canonicalized forms so e.g. trailing zeros don't mismatch.
    if canonicalize_version(dist_verstr) != canonicalize_version(w.version):
        raise InvalidWheelFilename(
            "Wheel has unexpected file name: expected {!r}, "
            "got {!r}".format(dist_verstr, w.version),
        )
    metadata_version_value = dist.metadata_version
    if metadata_version_value is None:
        raise UnsupportedWheel("Missing Metadata-Version")
    try:
        metadata_version = Version(metadata_version_value)
    except InvalidVersion:
        msg = f"Invalid Metadata-Version: {metadata_version_value}"
        raise UnsupportedWheel(msg)
    if metadata_version >= Version("1.2") and not isinstance(dist.version, Version):
        raise UnsupportedWheel(
            "Metadata 1.2 mandates PEP 440 version, "
            "but {!r} is not".format(dist_verstr)
        )
|
| 193 |
+
|
| 194 |
+
|
| 195 |
+
def _build_one(
    req: InstallRequirement,
    output_dir: str,
    verify: bool,
    build_options: List[str],
    global_options: List[str],
    editable: bool,
) -> Optional[str]:
    """Build one wheel.

    :return: The filename of the built wheel, or None if the build failed.
    """
    artifact = "editable" if editable else "wheel"
    try:
        ensure_dir(output_dir)
    except OSError as e:
        logger.warning(
            "Building %s for %s failed: %s",
            artifact,
            req.name,
            e,
        )
        return None

    # Install build deps into temporary directory (PEP 518)
    with req.build_env:
        wheel_path = _build_one_inside_env(
            req, output_dir, build_options, global_options, editable
        )

    if not wheel_path or not verify:
        return wheel_path

    try:
        _verify_one(req, wheel_path)
    except (InvalidWheelFilename, UnsupportedWheel) as e:
        logger.warning("Built %s for %s is invalid: %s", artifact, req.name, e)
        return None
    return wheel_path
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def _build_one_inside_env(
    req: InstallRequirement,
    output_dir: str,
    build_options: List[str],
    global_options: List[str],
    editable: bool,
) -> Optional[str]:
    """Build the wheel in a temp dir and move it into *output_dir*.

    Dispatches to the PEP 517 (editable or regular) or legacy setup.py
    backend, then hashes and moves the result.

    :return: the final wheel path, or None if building/moving failed.
    """
    with TempDirectory(kind="wheel") as temp_dir:
        assert req.name
        if req.use_pep517:
            assert req.metadata_directory
            assert req.pep517_backend
            # PEP 517 hooks take no setup.py options; warn so the user
            # knows these flags are being dropped.
            if global_options:
                logger.warning(
                    "Ignoring --global-option when building %s using PEP 517", req.name
                )
            if build_options:
                logger.warning(
                    "Ignoring --build-option when building %s using PEP 517", req.name
                )
            if editable:
                wheel_path = build_wheel_editable(
                    name=req.name,
                    backend=req.pep517_backend,
                    metadata_directory=req.metadata_directory,
                    tempd=temp_dir.path,
                )
            else:
                wheel_path = build_wheel_pep517(
                    name=req.name,
                    backend=req.pep517_backend,
                    metadata_directory=req.metadata_directory,
                    tempd=temp_dir.path,
                )
        else:
            wheel_path = build_wheel_legacy(
                name=req.name,
                setup_py_path=req.setup_py_path,
                source_dir=req.unpacked_source_directory,
                global_options=global_options,
                build_options=build_options,
                tempd=temp_dir.path,
            )

        if wheel_path is not None:
            wheel_name = os.path.basename(wheel_path)
            dest_path = os.path.join(output_dir, wheel_name)
            try:
                # Hash before moving, while the file is still in the temp dir.
                wheel_hash, length = hash_file(wheel_path)
                shutil.move(wheel_path, dest_path)
                logger.info(
                    "Created wheel for %s: filename=%s size=%d sha256=%s",
                    req.name,
                    wheel_name,
                    length,
                    wheel_hash.hexdigest(),
                )
                logger.info("Stored in directory: %s", output_dir)
                return dest_path
            except Exception as e:
                logger.warning(
                    "Building wheel for %s failed: %s",
                    req.name,
                    e,
                )
        # Ignore return, we can't do anything else useful.
        if not req.use_pep517:
            _clean_one_legacy(req, global_options)
        return None
|
| 302 |
+
|
| 303 |
+
|
| 304 |
+
def _clean_one_legacy(req: InstallRequirement, global_options: List[str]) -> bool:
    """Run ``setup.py clean`` for *req*; return True on success."""
    clean_args = make_setuptools_clean_args(
        req.setup_py_path,
        global_options=global_options,
    )

    logger.info("Running setup.py clean for %s", req.name)
    try:
        call_subprocess(
            clean_args, command_desc="python setup.py clean", cwd=req.source_dir
        )
    except Exception:
        logger.error("Failed cleaning build dir for %s", req.name)
        return False
    return True
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
def build(
    requirements: Iterable[InstallRequirement],
    wheel_cache: WheelCache,
    verify: bool,
    build_options: List[str],
    global_options: List[str],
) -> BuildResult:
    """Build wheels.

    :param requirements: the requirements to build into wheels.
    :param wheel_cache: cache supplying each build's output directory.
    :param verify: whether to run _verify_one on each built wheel.
    :param build_options: extra setup.py bdist_wheel options (legacy builds).
    :param global_options: extra global setup.py options (legacy builds).
    :return: The list of InstallRequirement that succeeded to build and
        the list of InstallRequirement that failed to build.
    """
    if not requirements:
        return [], []

    # Build the wheels.
    logger.info(
        "Building wheels for collected packages: %s",
        ", ".join(req.name for req in requirements),  # type: ignore
    )

    with indent_log():
        build_successes, build_failures = [], []
        for req in requirements:
            assert req.name
            cache_dir = _get_cache_dir(req, wheel_cache)
            wheel_file = _build_one(
                req,
                cache_dir,
                verify,
                build_options,
                global_options,
                # Only build editables as wheels when the req permits it.
                req.editable and req.permit_editable_wheels,
            )
            if wheel_file:
                # Record the download origin in the cache
                if req.download_info is not None:
                    # download_info is guaranteed to be set because when we build an
                    # InstallRequirement it has been through the preparer before, but
                    # let's be cautious.
                    wheel_cache.record_download_origin(cache_dir, req.download_info)
                # Update the link for this.
                req.link = Link(path_to_url(wheel_file))
                req.local_file_path = req.link.file_path
                assert req.link.is_wheel
                build_successes.append(req)
            else:
                build_failures.append(req)

    # notify success/failure
    if build_successes:
        logger.info(
            "Successfully built %s",
            " ".join([req.name for req in build_successes]),  # type: ignore
        )
    if build_failures:
        logger.info(
            "Failed to build %s",
            " ".join([req.name for req in build_failures]),  # type: ignore
        )
    # Return a list of requirements that failed to build
    return build_successes, build_failures
|