Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- llava/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/_log.py +38 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/compat.py +79 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/compatibility_tags.py +188 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/datetime.py +11 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/deprecation.py +124 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/direct_url_helpers.py +87 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/egg_link.py +80 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/entrypoints.py +84 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/filesystem.py +149 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/filetypes.py +27 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/glibc.py +101 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/hashes.py +147 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/logging.py +354 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/misc.py +773 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/packaging.py +58 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/retry.py +42 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/subprocess.py +245 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/temp_dir.py +296 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/unpacking.py +337 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/urls.py +55 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/virtualenv.py +104 -0
- llava/lib/python3.10/site-packages/pip/_internal/utils/wheel.py +134 -0
- minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/async_timeout/__init__.py +276 -0
- minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/async_timeout/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/async_timeout/py.typed +1 -0
- minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/multidict-6.1.0.dist-info/INSTALLER +1 -0
- minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/multidict-6.1.0.dist-info/LICENSE +13 -0
- minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/multidict-6.1.0.dist-info/METADATA +140 -0
- minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/multidict-6.1.0.dist-info/WHEEL +6 -0
- minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/multidict-6.1.0.dist-info/top_level.txt +1 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_convolution_mode_ops.h +28 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_ctc_loss.h +53 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_empty_affine_quantized.h +113 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_addcmul_compositeexplicitautograd_dispatch.h +34 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_norm_ops.h +39 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_int_mm_native.h +24 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_is_all_true_ops.h +28 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_sparse_mask_projection.h +34 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/amin_native.h +23 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/atan2_cuda_dispatch.h +26 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/col_indices_copy.h +39 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/combinations_native.h +21 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/conv_depthwise3d.h +91 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/cudnn_is_acceptable_compositeimplicitautograd_dispatch.h +23 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/diagonal_scatter_native.h +22 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/dropout.h +35 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/expm1_ops.h +50 -0
- parrot/lib/python3.10/site-packages/torch/include/ATen/ops/fake_quantize_per_channel_affine_native.h +21 -0
llava/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-310.pyc
ADDED
|
Binary file (2.08 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/utils/_log.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Customize logging
|
| 2 |
+
|
| 3 |
+
Defines custom logger class for the `logger.verbose(...)` method.
|
| 4 |
+
|
| 5 |
+
init_logging() must be called before any other modules that call logging.getLogger.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import logging
|
| 9 |
+
from typing import Any, cast
|
| 10 |
+
|
| 11 |
+
# custom log level for `--verbose` output
|
| 12 |
+
# between DEBUG and INFO
|
| 13 |
+
VERBOSE = 15
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class VerboseLogger(logging.Logger):
|
| 17 |
+
"""Custom Logger, defining a verbose log-level
|
| 18 |
+
|
| 19 |
+
VERBOSE is between INFO and DEBUG.
|
| 20 |
+
"""
|
| 21 |
+
|
| 22 |
+
def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:
|
| 23 |
+
return self.log(VERBOSE, msg, *args, **kwargs)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def getLogger(name: str) -> VerboseLogger:
|
| 27 |
+
"""logging.getLogger, but ensures our VerboseLogger class is returned"""
|
| 28 |
+
return cast(VerboseLogger, logging.getLogger(name))
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def init_logging() -> None:
|
| 32 |
+
"""Register our VerboseLogger and VERBOSE log level.
|
| 33 |
+
|
| 34 |
+
Should be called before any calls to getLogger(),
|
| 35 |
+
i.e. in pip._internal.__init__
|
| 36 |
+
"""
|
| 37 |
+
logging.setLoggerClass(VerboseLogger)
|
| 38 |
+
logging.addLevelName(VERBOSE, "VERBOSE")
|
llava/lib/python3.10/site-packages/pip/_internal/utils/compat.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Stuff that differs in different Python versions and platform
|
| 2 |
+
distributions."""
|
| 3 |
+
|
| 4 |
+
import importlib.resources
|
| 5 |
+
import logging
|
| 6 |
+
import os
|
| 7 |
+
import sys
|
| 8 |
+
from typing import IO
|
| 9 |
+
|
| 10 |
+
__all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"]
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
logger = logging.getLogger(__name__)
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
def has_tls() -> bool:
|
| 17 |
+
try:
|
| 18 |
+
import _ssl # noqa: F401 # ignore unused
|
| 19 |
+
|
| 20 |
+
return True
|
| 21 |
+
except ImportError:
|
| 22 |
+
pass
|
| 23 |
+
|
| 24 |
+
from pip._vendor.urllib3.util import IS_PYOPENSSL
|
| 25 |
+
|
| 26 |
+
return IS_PYOPENSSL
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def get_path_uid(path: str) -> int:
|
| 30 |
+
"""
|
| 31 |
+
Return path's uid.
|
| 32 |
+
|
| 33 |
+
Does not follow symlinks:
|
| 34 |
+
https://github.com/pypa/pip/pull/935#discussion_r5307003
|
| 35 |
+
|
| 36 |
+
Placed this function in compat due to differences on AIX and
|
| 37 |
+
Jython, that should eventually go away.
|
| 38 |
+
|
| 39 |
+
:raises OSError: When path is a symlink or can't be read.
|
| 40 |
+
"""
|
| 41 |
+
if hasattr(os, "O_NOFOLLOW"):
|
| 42 |
+
fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
|
| 43 |
+
file_uid = os.fstat(fd).st_uid
|
| 44 |
+
os.close(fd)
|
| 45 |
+
else: # AIX and Jython
|
| 46 |
+
# WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
|
| 47 |
+
if not os.path.islink(path):
|
| 48 |
+
# older versions of Jython don't have `os.fstat`
|
| 49 |
+
file_uid = os.stat(path).st_uid
|
| 50 |
+
else:
|
| 51 |
+
# raise OSError for parity with os.O_NOFOLLOW above
|
| 52 |
+
raise OSError(f"{path} is a symlink; Will not return uid for symlinks")
|
| 53 |
+
return file_uid
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
# The importlib.resources.open_text function was deprecated in 3.11 with suggested
|
| 57 |
+
# replacement we use below.
|
| 58 |
+
if sys.version_info < (3, 11):
|
| 59 |
+
open_text_resource = importlib.resources.open_text
|
| 60 |
+
else:
|
| 61 |
+
|
| 62 |
+
def open_text_resource(
|
| 63 |
+
package: str, resource: str, encoding: str = "utf-8", errors: str = "strict"
|
| 64 |
+
) -> IO[str]:
|
| 65 |
+
return (importlib.resources.files(package) / resource).open(
|
| 66 |
+
"r", encoding=encoding, errors=errors
|
| 67 |
+
)
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
# packages in the stdlib that may have installation metadata, but should not be
|
| 71 |
+
# considered 'installed'. this theoretically could be determined based on
|
| 72 |
+
# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
|
| 73 |
+
# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
|
| 74 |
+
# make this ineffective, so hard-coding
|
| 75 |
+
stdlib_pkgs = {"python", "wsgiref", "argparse"}
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
# windows detection, covers cpython and ironpython
|
| 79 |
+
WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")
|
llava/lib/python3.10/site-packages/pip/_internal/utils/compatibility_tags.py
ADDED
|
@@ -0,0 +1,188 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Generate and work with PEP 425 Compatibility Tags.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import re
|
| 5 |
+
from typing import List, Optional, Tuple
|
| 6 |
+
|
| 7 |
+
from pip._vendor.packaging.tags import (
|
| 8 |
+
PythonVersion,
|
| 9 |
+
Tag,
|
| 10 |
+
compatible_tags,
|
| 11 |
+
cpython_tags,
|
| 12 |
+
generic_tags,
|
| 13 |
+
interpreter_name,
|
| 14 |
+
interpreter_version,
|
| 15 |
+
ios_platforms,
|
| 16 |
+
mac_platforms,
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
_apple_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)")
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def version_info_to_nodot(version_info: Tuple[int, ...]) -> str:
|
| 23 |
+
# Only use up to the first two numbers.
|
| 24 |
+
return "".join(map(str, version_info[:2]))
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def _mac_platforms(arch: str) -> List[str]:
|
| 28 |
+
match = _apple_arch_pat.match(arch)
|
| 29 |
+
if match:
|
| 30 |
+
name, major, minor, actual_arch = match.groups()
|
| 31 |
+
mac_version = (int(major), int(minor))
|
| 32 |
+
arches = [
|
| 33 |
+
# Since we have always only checked that the platform starts
|
| 34 |
+
# with "macosx", for backwards-compatibility we extract the
|
| 35 |
+
# actual prefix provided by the user in case they provided
|
| 36 |
+
# something like "macosxcustom_". It may be good to remove
|
| 37 |
+
# this as undocumented or deprecate it in the future.
|
| 38 |
+
"{}_{}".format(name, arch[len("macosx_") :])
|
| 39 |
+
for arch in mac_platforms(mac_version, actual_arch)
|
| 40 |
+
]
|
| 41 |
+
else:
|
| 42 |
+
# arch pattern didn't match (?!)
|
| 43 |
+
arches = [arch]
|
| 44 |
+
return arches
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def _ios_platforms(arch: str) -> List[str]:
|
| 48 |
+
match = _apple_arch_pat.match(arch)
|
| 49 |
+
if match:
|
| 50 |
+
name, major, minor, actual_multiarch = match.groups()
|
| 51 |
+
ios_version = (int(major), int(minor))
|
| 52 |
+
arches = [
|
| 53 |
+
# Since we have always only checked that the platform starts
|
| 54 |
+
# with "ios", for backwards-compatibility we extract the
|
| 55 |
+
# actual prefix provided by the user in case they provided
|
| 56 |
+
# something like "ioscustom_". It may be good to remove
|
| 57 |
+
# this as undocumented or deprecate it in the future.
|
| 58 |
+
"{}_{}".format(name, arch[len("ios_") :])
|
| 59 |
+
for arch in ios_platforms(ios_version, actual_multiarch)
|
| 60 |
+
]
|
| 61 |
+
else:
|
| 62 |
+
# arch pattern didn't match (?!)
|
| 63 |
+
arches = [arch]
|
| 64 |
+
return arches
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _custom_manylinux_platforms(arch: str) -> List[str]:
|
| 68 |
+
arches = [arch]
|
| 69 |
+
arch_prefix, arch_sep, arch_suffix = arch.partition("_")
|
| 70 |
+
if arch_prefix == "manylinux2014":
|
| 71 |
+
# manylinux1/manylinux2010 wheels run on most manylinux2014 systems
|
| 72 |
+
# with the exception of wheels depending on ncurses. PEP 599 states
|
| 73 |
+
# manylinux1/manylinux2010 wheels should be considered
|
| 74 |
+
# manylinux2014 wheels:
|
| 75 |
+
# https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels
|
| 76 |
+
if arch_suffix in {"i686", "x86_64"}:
|
| 77 |
+
arches.append("manylinux2010" + arch_sep + arch_suffix)
|
| 78 |
+
arches.append("manylinux1" + arch_sep + arch_suffix)
|
| 79 |
+
elif arch_prefix == "manylinux2010":
|
| 80 |
+
# manylinux1 wheels run on most manylinux2010 systems with the
|
| 81 |
+
# exception of wheels depending on ncurses. PEP 571 states
|
| 82 |
+
# manylinux1 wheels should be considered manylinux2010 wheels:
|
| 83 |
+
# https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
|
| 84 |
+
arches.append("manylinux1" + arch_sep + arch_suffix)
|
| 85 |
+
return arches
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def _get_custom_platforms(arch: str) -> List[str]:
|
| 89 |
+
arch_prefix, arch_sep, arch_suffix = arch.partition("_")
|
| 90 |
+
if arch.startswith("macosx"):
|
| 91 |
+
arches = _mac_platforms(arch)
|
| 92 |
+
elif arch.startswith("ios"):
|
| 93 |
+
arches = _ios_platforms(arch)
|
| 94 |
+
elif arch_prefix in ["manylinux2014", "manylinux2010"]:
|
| 95 |
+
arches = _custom_manylinux_platforms(arch)
|
| 96 |
+
else:
|
| 97 |
+
arches = [arch]
|
| 98 |
+
return arches
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[str]]:
|
| 102 |
+
if not platforms:
|
| 103 |
+
return None
|
| 104 |
+
|
| 105 |
+
seen = set()
|
| 106 |
+
result = []
|
| 107 |
+
|
| 108 |
+
for p in platforms:
|
| 109 |
+
if p in seen:
|
| 110 |
+
continue
|
| 111 |
+
additions = [c for c in _get_custom_platforms(p) if c not in seen]
|
| 112 |
+
seen.update(additions)
|
| 113 |
+
result.extend(additions)
|
| 114 |
+
|
| 115 |
+
return result
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def _get_python_version(version: str) -> PythonVersion:
|
| 119 |
+
if len(version) > 1:
|
| 120 |
+
return int(version[0]), int(version[1:])
|
| 121 |
+
else:
|
| 122 |
+
return (int(version[0]),)
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def _get_custom_interpreter(
|
| 126 |
+
implementation: Optional[str] = None, version: Optional[str] = None
|
| 127 |
+
) -> str:
|
| 128 |
+
if implementation is None:
|
| 129 |
+
implementation = interpreter_name()
|
| 130 |
+
if version is None:
|
| 131 |
+
version = interpreter_version()
|
| 132 |
+
return f"{implementation}{version}"
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def get_supported(
|
| 136 |
+
version: Optional[str] = None,
|
| 137 |
+
platforms: Optional[List[str]] = None,
|
| 138 |
+
impl: Optional[str] = None,
|
| 139 |
+
abis: Optional[List[str]] = None,
|
| 140 |
+
) -> List[Tag]:
|
| 141 |
+
"""Return a list of supported tags for each version specified in
|
| 142 |
+
`versions`.
|
| 143 |
+
|
| 144 |
+
:param version: a string version, of the form "33" or "32",
|
| 145 |
+
or None. The version will be assumed to support our ABI.
|
| 146 |
+
:param platform: specify a list of platforms you want valid
|
| 147 |
+
tags for, or None. If None, use the local system platform.
|
| 148 |
+
:param impl: specify the exact implementation you want valid
|
| 149 |
+
tags for, or None. If None, use the local interpreter impl.
|
| 150 |
+
:param abis: specify a list of abis you want valid
|
| 151 |
+
tags for, or None. If None, use the local interpreter abi.
|
| 152 |
+
"""
|
| 153 |
+
supported: List[Tag] = []
|
| 154 |
+
|
| 155 |
+
python_version: Optional[PythonVersion] = None
|
| 156 |
+
if version is not None:
|
| 157 |
+
python_version = _get_python_version(version)
|
| 158 |
+
|
| 159 |
+
interpreter = _get_custom_interpreter(impl, version)
|
| 160 |
+
|
| 161 |
+
platforms = _expand_allowed_platforms(platforms)
|
| 162 |
+
|
| 163 |
+
is_cpython = (impl or interpreter_name()) == "cp"
|
| 164 |
+
if is_cpython:
|
| 165 |
+
supported.extend(
|
| 166 |
+
cpython_tags(
|
| 167 |
+
python_version=python_version,
|
| 168 |
+
abis=abis,
|
| 169 |
+
platforms=platforms,
|
| 170 |
+
)
|
| 171 |
+
)
|
| 172 |
+
else:
|
| 173 |
+
supported.extend(
|
| 174 |
+
generic_tags(
|
| 175 |
+
interpreter=interpreter,
|
| 176 |
+
abis=abis,
|
| 177 |
+
platforms=platforms,
|
| 178 |
+
)
|
| 179 |
+
)
|
| 180 |
+
supported.extend(
|
| 181 |
+
compatible_tags(
|
| 182 |
+
python_version=python_version,
|
| 183 |
+
interpreter=interpreter,
|
| 184 |
+
platforms=platforms,
|
| 185 |
+
)
|
| 186 |
+
)
|
| 187 |
+
|
| 188 |
+
return supported
|
llava/lib/python3.10/site-packages/pip/_internal/utils/datetime.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""For when pip wants to check the date or time.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import datetime
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
def today_is_later_than(year: int, month: int, day: int) -> bool:
|
| 8 |
+
today = datetime.date.today()
|
| 9 |
+
given = datetime.date(year, month, day)
|
| 10 |
+
|
| 11 |
+
return today > given
|
llava/lib/python3.10/site-packages/pip/_internal/utils/deprecation.py
ADDED
|
@@ -0,0 +1,124 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
A module that implements tooling to enable easy warnings about deprecations.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import logging
|
| 6 |
+
import warnings
|
| 7 |
+
from typing import Any, Optional, TextIO, Type, Union
|
| 8 |
+
|
| 9 |
+
from pip._vendor.packaging.version import parse
|
| 10 |
+
|
| 11 |
+
from pip import __version__ as current_version # NOTE: tests patch this name.
|
| 12 |
+
|
| 13 |
+
DEPRECATION_MSG_PREFIX = "DEPRECATION: "
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class PipDeprecationWarning(Warning):
|
| 17 |
+
pass
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
_original_showwarning: Any = None
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
# Warnings <-> Logging Integration
|
| 24 |
+
def _showwarning(
|
| 25 |
+
message: Union[Warning, str],
|
| 26 |
+
category: Type[Warning],
|
| 27 |
+
filename: str,
|
| 28 |
+
lineno: int,
|
| 29 |
+
file: Optional[TextIO] = None,
|
| 30 |
+
line: Optional[str] = None,
|
| 31 |
+
) -> None:
|
| 32 |
+
if file is not None:
|
| 33 |
+
if _original_showwarning is not None:
|
| 34 |
+
_original_showwarning(message, category, filename, lineno, file, line)
|
| 35 |
+
elif issubclass(category, PipDeprecationWarning):
|
| 36 |
+
# We use a specially named logger which will handle all of the
|
| 37 |
+
# deprecation messages for pip.
|
| 38 |
+
logger = logging.getLogger("pip._internal.deprecations")
|
| 39 |
+
logger.warning(message)
|
| 40 |
+
else:
|
| 41 |
+
_original_showwarning(message, category, filename, lineno, file, line)
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def install_warning_logger() -> None:
|
| 45 |
+
# Enable our Deprecation Warnings
|
| 46 |
+
warnings.simplefilter("default", PipDeprecationWarning, append=True)
|
| 47 |
+
|
| 48 |
+
global _original_showwarning
|
| 49 |
+
|
| 50 |
+
if _original_showwarning is None:
|
| 51 |
+
_original_showwarning = warnings.showwarning
|
| 52 |
+
warnings.showwarning = _showwarning
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def deprecated(
|
| 56 |
+
*,
|
| 57 |
+
reason: str,
|
| 58 |
+
replacement: Optional[str],
|
| 59 |
+
gone_in: Optional[str],
|
| 60 |
+
feature_flag: Optional[str] = None,
|
| 61 |
+
issue: Optional[int] = None,
|
| 62 |
+
) -> None:
|
| 63 |
+
"""Helper to deprecate existing functionality.
|
| 64 |
+
|
| 65 |
+
reason:
|
| 66 |
+
Textual reason shown to the user about why this functionality has
|
| 67 |
+
been deprecated. Should be a complete sentence.
|
| 68 |
+
replacement:
|
| 69 |
+
Textual suggestion shown to the user about what alternative
|
| 70 |
+
functionality they can use.
|
| 71 |
+
gone_in:
|
| 72 |
+
The version of pip does this functionality should get removed in.
|
| 73 |
+
Raises an error if pip's current version is greater than or equal to
|
| 74 |
+
this.
|
| 75 |
+
feature_flag:
|
| 76 |
+
Command-line flag of the form --use-feature={feature_flag} for testing
|
| 77 |
+
upcoming functionality.
|
| 78 |
+
issue:
|
| 79 |
+
Issue number on the tracker that would serve as a useful place for
|
| 80 |
+
users to find related discussion and provide feedback.
|
| 81 |
+
"""
|
| 82 |
+
|
| 83 |
+
# Determine whether or not the feature is already gone in this version.
|
| 84 |
+
is_gone = gone_in is not None and parse(current_version) >= parse(gone_in)
|
| 85 |
+
|
| 86 |
+
message_parts = [
|
| 87 |
+
(reason, f"{DEPRECATION_MSG_PREFIX}{{}}"),
|
| 88 |
+
(
|
| 89 |
+
gone_in,
|
| 90 |
+
(
|
| 91 |
+
"pip {} will enforce this behaviour change."
|
| 92 |
+
if not is_gone
|
| 93 |
+
else "Since pip {}, this is no longer supported."
|
| 94 |
+
),
|
| 95 |
+
),
|
| 96 |
+
(
|
| 97 |
+
replacement,
|
| 98 |
+
"A possible replacement is {}.",
|
| 99 |
+
),
|
| 100 |
+
(
|
| 101 |
+
feature_flag,
|
| 102 |
+
(
|
| 103 |
+
"You can use the flag --use-feature={} to test the upcoming behaviour."
|
| 104 |
+
if not is_gone
|
| 105 |
+
else None
|
| 106 |
+
),
|
| 107 |
+
),
|
| 108 |
+
(
|
| 109 |
+
issue,
|
| 110 |
+
"Discussion can be found at https://github.com/pypa/pip/issues/{}",
|
| 111 |
+
),
|
| 112 |
+
]
|
| 113 |
+
|
| 114 |
+
message = " ".join(
|
| 115 |
+
format_str.format(value)
|
| 116 |
+
for value, format_str in message_parts
|
| 117 |
+
if format_str is not None and value is not None
|
| 118 |
+
)
|
| 119 |
+
|
| 120 |
+
# Raise as an error if this behaviour is deprecated.
|
| 121 |
+
if is_gone:
|
| 122 |
+
raise PipDeprecationWarning(message)
|
| 123 |
+
|
| 124 |
+
warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
|
llava/lib/python3.10/site-packages/pip/_internal/utils/direct_url_helpers.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Optional
|
| 2 |
+
|
| 3 |
+
from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo
|
| 4 |
+
from pip._internal.models.link import Link
|
| 5 |
+
from pip._internal.utils.urls import path_to_url
|
| 6 |
+
from pip._internal.vcs import vcs
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> str:
|
| 10 |
+
"""Convert a DirectUrl to a pip requirement string."""
|
| 11 |
+
direct_url.validate() # if invalid, this is a pip bug
|
| 12 |
+
requirement = name + " @ "
|
| 13 |
+
fragments = []
|
| 14 |
+
if isinstance(direct_url.info, VcsInfo):
|
| 15 |
+
requirement += (
|
| 16 |
+
f"{direct_url.info.vcs}+{direct_url.url}@{direct_url.info.commit_id}"
|
| 17 |
+
)
|
| 18 |
+
elif isinstance(direct_url.info, ArchiveInfo):
|
| 19 |
+
requirement += direct_url.url
|
| 20 |
+
if direct_url.info.hash:
|
| 21 |
+
fragments.append(direct_url.info.hash)
|
| 22 |
+
else:
|
| 23 |
+
assert isinstance(direct_url.info, DirInfo)
|
| 24 |
+
requirement += direct_url.url
|
| 25 |
+
if direct_url.subdirectory:
|
| 26 |
+
fragments.append("subdirectory=" + direct_url.subdirectory)
|
| 27 |
+
if fragments:
|
| 28 |
+
requirement += "#" + "&".join(fragments)
|
| 29 |
+
return requirement
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def direct_url_for_editable(source_dir: str) -> DirectUrl:
|
| 33 |
+
return DirectUrl(
|
| 34 |
+
url=path_to_url(source_dir),
|
| 35 |
+
info=DirInfo(editable=True),
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def direct_url_from_link(
|
| 40 |
+
link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False
|
| 41 |
+
) -> DirectUrl:
|
| 42 |
+
if link.is_vcs:
|
| 43 |
+
vcs_backend = vcs.get_backend_for_scheme(link.scheme)
|
| 44 |
+
assert vcs_backend
|
| 45 |
+
url, requested_revision, _ = vcs_backend.get_url_rev_and_auth(
|
| 46 |
+
link.url_without_fragment
|
| 47 |
+
)
|
| 48 |
+
# For VCS links, we need to find out and add commit_id.
|
| 49 |
+
if link_is_in_wheel_cache:
|
| 50 |
+
# If the requested VCS link corresponds to a cached
|
| 51 |
+
# wheel, it means the requested revision was an
|
| 52 |
+
# immutable commit hash, otherwise it would not have
|
| 53 |
+
# been cached. In that case we don't have a source_dir
|
| 54 |
+
# with the VCS checkout.
|
| 55 |
+
assert requested_revision
|
| 56 |
+
commit_id = requested_revision
|
| 57 |
+
else:
|
| 58 |
+
# If the wheel was not in cache, it means we have
|
| 59 |
+
# had to checkout from VCS to build and we have a source_dir
|
| 60 |
+
# which we can inspect to find out the commit id.
|
| 61 |
+
assert source_dir
|
| 62 |
+
commit_id = vcs_backend.get_revision(source_dir)
|
| 63 |
+
return DirectUrl(
|
| 64 |
+
url=url,
|
| 65 |
+
info=VcsInfo(
|
| 66 |
+
vcs=vcs_backend.name,
|
| 67 |
+
commit_id=commit_id,
|
| 68 |
+
requested_revision=requested_revision,
|
| 69 |
+
),
|
| 70 |
+
subdirectory=link.subdirectory_fragment,
|
| 71 |
+
)
|
| 72 |
+
elif link.is_existing_dir():
|
| 73 |
+
return DirectUrl(
|
| 74 |
+
url=link.url_without_fragment,
|
| 75 |
+
info=DirInfo(),
|
| 76 |
+
subdirectory=link.subdirectory_fragment,
|
| 77 |
+
)
|
| 78 |
+
else:
|
| 79 |
+
hash = None
|
| 80 |
+
hash_name = link.hash_name
|
| 81 |
+
if hash_name:
|
| 82 |
+
hash = f"{hash_name}={link.hash}"
|
| 83 |
+
return DirectUrl(
|
| 84 |
+
url=link.url_without_fragment,
|
| 85 |
+
info=ArchiveInfo(hash=hash),
|
| 86 |
+
subdirectory=link.subdirectory_fragment,
|
| 87 |
+
)
|
llava/lib/python3.10/site-packages/pip/_internal/utils/egg_link.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import re
|
| 3 |
+
import sys
|
| 4 |
+
from typing import List, Optional
|
| 5 |
+
|
| 6 |
+
from pip._internal.locations import site_packages, user_site
|
| 7 |
+
from pip._internal.utils.virtualenv import (
|
| 8 |
+
running_under_virtualenv,
|
| 9 |
+
virtualenv_no_global,
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
__all__ = [
|
| 13 |
+
"egg_link_path_from_sys_path",
|
| 14 |
+
"egg_link_path_from_location",
|
| 15 |
+
]
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def _egg_link_names(raw_name: str) -> List[str]:
|
| 19 |
+
"""
|
| 20 |
+
Convert a Name metadata value to a .egg-link name, by applying
|
| 21 |
+
the same substitution as pkg_resources's safe_name function.
|
| 22 |
+
Note: we cannot use canonicalize_name because it has a different logic.
|
| 23 |
+
|
| 24 |
+
We also look for the raw name (without normalization) as setuptools 69 changed
|
| 25 |
+
the way it names .egg-link files (https://github.com/pypa/setuptools/issues/4167).
|
| 26 |
+
"""
|
| 27 |
+
return [
|
| 28 |
+
re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link",
|
| 29 |
+
f"{raw_name}.egg-link",
|
| 30 |
+
]
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]:
|
| 34 |
+
"""
|
| 35 |
+
Look for a .egg-link file for project name, by walking sys.path.
|
| 36 |
+
"""
|
| 37 |
+
egg_link_names = _egg_link_names(raw_name)
|
| 38 |
+
for path_item in sys.path:
|
| 39 |
+
for egg_link_name in egg_link_names:
|
| 40 |
+
egg_link = os.path.join(path_item, egg_link_name)
|
| 41 |
+
if os.path.isfile(egg_link):
|
| 42 |
+
return egg_link
|
| 43 |
+
return None
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def egg_link_path_from_location(raw_name: str) -> Optional[str]:
|
| 47 |
+
"""
|
| 48 |
+
Return the path for the .egg-link file if it exists, otherwise, None.
|
| 49 |
+
|
| 50 |
+
There's 3 scenarios:
|
| 51 |
+
1) not in a virtualenv
|
| 52 |
+
try to find in site.USER_SITE, then site_packages
|
| 53 |
+
2) in a no-global virtualenv
|
| 54 |
+
try to find in site_packages
|
| 55 |
+
3) in a yes-global virtualenv
|
| 56 |
+
try to find in site_packages, then site.USER_SITE
|
| 57 |
+
(don't look in global location)
|
| 58 |
+
|
| 59 |
+
For #1 and #3, there could be odd cases, where there's an egg-link in 2
|
| 60 |
+
locations.
|
| 61 |
+
|
| 62 |
+
This method will just return the first one found.
|
| 63 |
+
"""
|
| 64 |
+
sites: List[str] = []
|
| 65 |
+
if running_under_virtualenv():
|
| 66 |
+
sites.append(site_packages)
|
| 67 |
+
if not virtualenv_no_global() and user_site:
|
| 68 |
+
sites.append(user_site)
|
| 69 |
+
else:
|
| 70 |
+
if user_site:
|
| 71 |
+
sites.append(user_site)
|
| 72 |
+
sites.append(site_packages)
|
| 73 |
+
|
| 74 |
+
egg_link_names = _egg_link_names(raw_name)
|
| 75 |
+
for site in sites:
|
| 76 |
+
for egg_link_name in egg_link_names:
|
| 77 |
+
egglink = os.path.join(site, egg_link_name)
|
| 78 |
+
if os.path.isfile(egglink):
|
| 79 |
+
return egglink
|
| 80 |
+
return None
|
llava/lib/python3.10/site-packages/pip/_internal/utils/entrypoints.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import itertools
|
| 2 |
+
import os
|
| 3 |
+
import shutil
|
| 4 |
+
import sys
|
| 5 |
+
from typing import List, Optional
|
| 6 |
+
|
| 7 |
+
from pip._internal.cli.main import main
|
| 8 |
+
from pip._internal.utils.compat import WINDOWS
|
| 9 |
+
|
| 10 |
+
# Candidate console-script names for this interpreter, most generic first,
# e.g. ["pip", "pip3", "pip3.10"].
_EXECUTABLE_NAMES = [
    "pip",
    f"pip{sys.version_info.major}",
    f"pip{sys.version_info.major}.{sys.version_info.minor}",
]
if WINDOWS:
    # On Windows console scripts are .exe launchers, so each candidate is
    # also tried with an ".exe" suffix.
    _allowed_extensions = {"", ".exe"}
    _EXECUTABLE_NAMES = [
        "".join(parts)
        for parts in itertools.product(_EXECUTABLE_NAMES, _allowed_extensions)
    ]
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def _wrapper(args: Optional[List[str]] = None) -> int:
    """Central wrapper for all old entrypoints.

    Historically pip has had several entrypoints defined. Because of issues
    arising from PATH, sys.path, multiple Pythons, their interactions, and most
    of them having a pip installed, users suffer every time an entrypoint gets
    moved.

    To alleviate this pain, and provide a mechanism for warning users and
    directing them to an appropriate place for help, we now define all of
    our old entrypoints as wrappers for the current one.
    """
    sys.stderr.write(
        "WARNING: pip is being invoked by an old script wrapper. This will "
        "fail in a future version of pip.\n"
        "Please see https://github.com/pypa/pip/issues/5599 for advice on "
        "fixing the underlying issue.\n"
        "To avoid this problem you can invoke Python with '-m pip' instead of "
        "running pip directly.\n"
    )
    # After warning, delegate to the real CLI entrypoint unchanged.
    return main(args)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def get_best_invocation_for_this_pip() -> str:
    """Try to figure out the best way to invoke pip in the current environment."""
    binary_directory = "Scripts" if WINDOWS else "bin"
    binary_prefix = os.path.join(sys.prefix, binary_directory)

    # Try to use pip[X[.Y]] names, if those executables for this environment are
    # the first on PATH with that name.
    path_parts = os.path.normcase(os.environ.get("PATH", "")).split(os.pathsep)
    exe_are_in_PATH = os.path.normcase(binary_prefix) in path_parts
    if exe_are_in_PATH:
        for exe_name in _EXECUTABLE_NAMES:
            found_executable = shutil.which(exe_name)
            binary_executable = os.path.join(binary_prefix, exe_name)
            if (
                found_executable
                and os.path.exists(binary_executable)
                and os.path.samefile(
                    found_executable,
                    binary_executable,
                )
            ):
                # PATH resolves this short name to our own environment's
                # script, so it is safe to recommend it.
                return exe_name

    # Use the `-m` invocation, if there's no "nice" invocation.
    return f"{get_best_invocation_for_this_python()} -m pip"
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def get_best_invocation_for_this_python() -> str:
    """Try to figure out the best way to invoke the current Python."""
    exe = sys.executable
    base = os.path.basename(exe)

    # Prefer the bare basename, but only when PATH resolution would pick
    # this exact interpreter.
    resolved = shutil.which(base)
    if resolved is not None and os.path.samefile(resolved, exe):
        return base

    # Fall back to the full path, because we couldn't find something simpler.
    return exe
|
llava/lib/python3.10/site-packages/pip/_internal/utils/filesystem.py
ADDED
|
@@ -0,0 +1,149 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import fnmatch
|
| 2 |
+
import os
|
| 3 |
+
import os.path
|
| 4 |
+
import random
|
| 5 |
+
import sys
|
| 6 |
+
from contextlib import contextmanager
|
| 7 |
+
from tempfile import NamedTemporaryFile
|
| 8 |
+
from typing import Any, BinaryIO, Generator, List, Union, cast
|
| 9 |
+
|
| 10 |
+
from pip._internal.utils.compat import get_path_uid
|
| 11 |
+
from pip._internal.utils.misc import format_size
|
| 12 |
+
from pip._internal.utils.retry import retry
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def check_path_owner(path: str) -> bool:
    """Return whether the current user can be considered the owner of *path*.

    Walks up from *path* to the nearest existing ancestor and checks write
    access there (or, when running as root, uid-0 ownership via
    ``get_path_uid``).
    """
    # If we don't have a way to check the effective uid of this process, then
    # we'll just assume that we own the directory.
    if sys.platform == "win32" or not hasattr(os, "geteuid"):
        return True

    assert os.path.isabs(path)

    previous = None
    while path != previous:
        if os.path.lexists(path):
            # Check if path is writable by current user.
            if os.geteuid() == 0:
                # Special handling for root user in order to handle properly
                # cases where users use sudo without -H flag.
                try:
                    path_uid = get_path_uid(path)
                except OSError:
                    return False
                return path_uid == 0
            else:
                return os.access(path, os.W_OK)
        else:
            # Path does not exist yet; inspect its parent instead.
            previous, path = path, os.path.dirname(path)
    return False  # assume we don't own the path
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
@contextmanager
def adjacent_tmp_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
    """Return a file-like object pointing to a tmp file next to path.

    The file is created securely and is ensured to be written to disk
    after the context reaches its end.

    kwargs will be passed to tempfile.NamedTemporaryFile to control
    the way the temporary file will be opened.
    """
    # The temp file is created in the same directory as *path*
    # (``dir=os.path.dirname(path)``) and is NOT deleted on close.
    with NamedTemporaryFile(
        delete=False,
        dir=os.path.dirname(path),
        prefix=os.path.basename(path),
        suffix=".tmp",
        **kwargs,
    ) as f:
        result = cast(BinaryIO, f)
        try:
            yield result
        finally:
            # Flush Python-level buffers and force the OS to commit the data
            # to disk, even if the body raised.
            result.flush()
            os.fsync(result.fileno())
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
# os.replace wrapped to retry for up to 1 second (0.25s between attempts),
# smoothing over transient filesystem errors.
replace = retry(stop_after_delay=1, wait=0.25)(os.replace)
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
# test_writable_dir and _test_writable_dir_win are copied from Flit,
|
| 71 |
+
# with the author's agreement to also place them under pip's license.
|
| 72 |
+
def test_writable_dir(path: str) -> bool:
|
| 73 |
+
"""Check if a directory is writable.
|
| 74 |
+
|
| 75 |
+
Uses os.access() on POSIX, tries creating files on Windows.
|
| 76 |
+
"""
|
| 77 |
+
# If the directory doesn't exist, find the closest parent that does.
|
| 78 |
+
while not os.path.isdir(path):
|
| 79 |
+
parent = os.path.dirname(path)
|
| 80 |
+
if parent == path:
|
| 81 |
+
break # Should never get here, but infinite loops are bad
|
| 82 |
+
path = parent
|
| 83 |
+
|
| 84 |
+
if os.name == "posix":
|
| 85 |
+
return os.access(path, os.W_OK)
|
| 86 |
+
|
| 87 |
+
return _test_writable_dir_win(path)
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def _test_writable_dir_win(path: str) -> bool:
|
| 91 |
+
# os.access doesn't work on Windows: http://bugs.python.org/issue2528
|
| 92 |
+
# and we can't use tempfile: http://bugs.python.org/issue22107
|
| 93 |
+
basename = "accesstest_deleteme_fishfingers_custard_"
|
| 94 |
+
alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"
|
| 95 |
+
for _ in range(10):
|
| 96 |
+
name = basename + "".join(random.choice(alphabet) for _ in range(6))
|
| 97 |
+
file = os.path.join(path, name)
|
| 98 |
+
try:
|
| 99 |
+
fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL)
|
| 100 |
+
except FileExistsError:
|
| 101 |
+
pass
|
| 102 |
+
except PermissionError:
|
| 103 |
+
# This could be because there's a directory with the same name.
|
| 104 |
+
# But it's highly unlikely there's a directory called that,
|
| 105 |
+
# so we'll assume it's because the parent dir is not writable.
|
| 106 |
+
# This could as well be because the parent dir is not readable,
|
| 107 |
+
# due to non-privileged user access.
|
| 108 |
+
return False
|
| 109 |
+
else:
|
| 110 |
+
os.close(fd)
|
| 111 |
+
os.unlink(file)
|
| 112 |
+
return True
|
| 113 |
+
|
| 114 |
+
# This should never be reached
|
| 115 |
+
raise OSError("Unexpected condition testing for writable directory")
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def find_files(path: str, pattern: str) -> List[str]:
    """Returns a list of absolute paths of files beneath path, recursively,
    with filenames which match the UNIX-style shell glob pattern."""
    return [
        os.path.join(root, name)
        for root, _, files in os.walk(path)
        for name in fnmatch.filter(files, pattern)
    ]
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def file_size(path: str) -> Union[int, float]:
    """Size of the file at *path* in bytes; symlinks are counted as 0."""
    return 0 if os.path.islink(path) else os.path.getsize(path)
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def format_file_size(path: str) -> str:
    """Return the size of the file at *path* as a human-readable string."""
    return format_size(file_size(path))
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def directory_size(path: str) -> Union[int, float]:
    """Total size in bytes of all files beneath *path*.

    Symlinked files contribute 0, matching file_size().
    """
    total = 0.0
    for root, _dirs, files in os.walk(path):
        for name in files:
            full = os.path.join(root, name)
            # Inlined file_size(): symlinks contribute nothing.
            total += 0 if os.path.islink(full) else os.path.getsize(full)
    return total
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def format_directory_size(path: str) -> str:
    """Return the total size of the tree at *path* as a human-readable string."""
    return format_size(directory_size(path))
|
llava/lib/python3.10/site-packages/pip/_internal/utils/filetypes.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Filetype information.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
from typing import Tuple
|
| 5 |
+
|
| 6 |
+
from pip._internal.utils.misc import splitext
|
| 7 |
+
|
| 8 |
+
# Extension used by built wheel files.
WHEEL_EXTENSION = ".whl"
# Archive extensions grouped by compression family; ARCHIVE_EXTENSIONS is
# the union of everything pip treats as an unpackable archive.
BZ2_EXTENSIONS: Tuple[str, ...] = (".tar.bz2", ".tbz")
XZ_EXTENSIONS: Tuple[str, ...] = (
    ".tar.xz",
    ".txz",
    ".tlz",
    ".tar.lz",
    ".tar.lzma",
)
ZIP_EXTENSIONS: Tuple[str, ...] = (".zip", WHEEL_EXTENSION)
TAR_EXTENSIONS: Tuple[str, ...] = (".tar.gz", ".tgz", ".tar")
ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def is_archive_file(name: str) -> bool:
    """Return True if `name` is a considered as an archive file."""
    extension = splitext(name)[1].lower()
    return extension in ARCHIVE_EXTENSIONS
|
llava/lib/python3.10/site-packages/pip/_internal/utils/glibc.py
ADDED
|
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
from typing import Optional, Tuple
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def glibc_version_string() -> Optional[str]:
    "Returns glibc version string, or None if not using glibc."
    # Prefer the cheap confstr-based lookup; fall back to ctypes only
    # when it yields nothing.
    return glibc_version_string_confstr() or glibc_version_string_ctypes()
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def glibc_version_string_confstr() -> Optional[str]:
    "Primary implementation of glibc_version_string using os.confstr."
    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
    # to be broken or missing. This strategy is used in the standard library
    # platform module:
    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
    if sys.platform == "win32":
        return None
    try:
        # The value looks like "glibc 2.17"; keep only the version part.
        raw = os.confstr("CS_GNU_LIBC_VERSION")
        if raw is None:
            return None
        _, version = raw.split()
    except (AttributeError, OSError, ValueError):
        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
        return None
    return version
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def glibc_version_string_ctypes() -> Optional[str]:
    "Fallback implementation of glibc_version_string using ctypes."

    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # We must also handle the special case where the executable is not a
    # dynamically linked executable. This can occur when using musl libc,
    # for example. In this situation, dlopen() will error, leading to an
    # OSError. Interestingly, at least in the case of musl, there is no
    # errno set on the OSError. The single string argument used to construct
    # OSError comes from libc itself and is therefore not portable to
    # hard code here. In any case, failure to call dlopen() means we
    # can't proceed, so we bail on our attempt.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        return None

    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str: str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    # ctypes returns bytes for c_char_p on Python 3; normalize to str.
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
# platform.libc_ver regularly returns completely nonsensical glibc
|
| 75 |
+
# versions. E.g. on my computer, platform says:
|
| 76 |
+
#
|
| 77 |
+
# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
|
| 78 |
+
# ('glibc', '2.7')
|
| 79 |
+
# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
|
| 80 |
+
# ('glibc', '2.9')
|
| 81 |
+
#
|
| 82 |
+
# But the truth is:
|
| 83 |
+
#
|
| 84 |
+
# ~$ ldd --version
|
| 85 |
+
# ldd (Debian GLIBC 2.22-11) 2.22
|
| 86 |
+
#
|
| 87 |
+
# This is unfortunate, because it means that the linehaul data on libc
|
| 88 |
+
# versions that was generated by pip 8.1.2 and earlier is useless and
|
| 89 |
+
# misleading. Solution: instead of using platform, use our code that actually
|
| 90 |
+
# works.
|
| 91 |
+
def libc_ver() -> Tuple[str, str]:
    """Try to determine the glibc version

    Returns a tuple of strings (lib, version) which default to empty strings
    in case the lookup fails.
    """
    version = glibc_version_string()
    return ("", "") if version is None else ("glibc", version)
|
llava/lib/python3.10/site-packages/pip/_internal/utils/hashes.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import hashlib
|
| 2 |
+
from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, NoReturn, Optional
|
| 3 |
+
|
| 4 |
+
from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError
|
| 5 |
+
from pip._internal.utils.misc import read_chunks
|
| 6 |
+
|
| 7 |
+
if TYPE_CHECKING:
|
| 8 |
+
from hashlib import _Hash
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
# The recommended hash algo of the moment. Change this whenever the state of
# the art changes; it won't hurt backward compatibility.
FAVORITE_HASH = "sha256"


# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
# Currently, those are the ones at least as collision-resistant as sha256.
STRONG_HASHES = ["sha256", "sha384", "sha512"]
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class Hashes:
    """A wrapper that builds multiple hashes at once and checks them against
    known-good values

    """

    def __init__(self, hashes: Optional[Dict[str, List[str]]] = None) -> None:
        """
        :param hashes: A dict of algorithm names pointing to lists of allowed
            hex digests
        """
        allowed = {}
        if hashes is not None:
            for alg, keys in hashes.items():
                # Make sure values are always sorted (to ease equality checks)
                allowed[alg] = [k.lower() for k in sorted(keys)]
        self._allowed = allowed

    def __and__(self, other: "Hashes") -> "Hashes":
        # Intersection: the result only allows digests permitted by both.
        if not isinstance(other, Hashes):
            return NotImplemented

        # If either of the Hashes object is entirely empty (i.e. no hash
        # specified at all), all hashes from the other object are allowed.
        if not other:
            return self
        if not self:
            return other

        # Otherwise only hashes that present in both objects are allowed.
        new = {}
        for alg, values in other._allowed.items():
            if alg not in self._allowed:
                continue
            new[alg] = [v for v in values if v in self._allowed[alg]]
        return Hashes(new)

    @property
    def digest_count(self) -> int:
        # Total number of known-good digests across all algorithms.
        return sum(len(digests) for digests in self._allowed.values())

    def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool:
        """Return whether the given hex digest is allowed."""
        return hex_digest in self._allowed.get(hash_name, [])

    def check_against_chunks(self, chunks: Iterable[bytes]) -> None:
        """Check good hashes against ones built from iterable of chunks of
        data.

        Raise HashMismatch if none match.

        """
        gots = {}
        for hash_name in self._allowed.keys():
            try:
                gots[hash_name] = hashlib.new(hash_name)
            except (ValueError, TypeError):
                raise InstallationError(f"Unknown hash name: {hash_name}")

        # Feed every chunk to every hasher in a single pass over the data.
        for chunk in chunks:
            for hash in gots.values():
                hash.update(chunk)

        # A single matching digest for any algorithm is enough.
        for hash_name, got in gots.items():
            if got.hexdigest() in self._allowed[hash_name]:
                return
        self._raise(gots)

    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
        raise HashMismatch(self._allowed, gots)

    def check_against_file(self, file: BinaryIO) -> None:
        """Check good hashes against a file-like object

        Raise HashMismatch if none match.

        """
        return self.check_against_chunks(read_chunks(file))

    def check_against_path(self, path: str) -> None:
        # Convenience wrapper: open *path* and defer to check_against_file.
        with open(path, "rb") as file:
            return self.check_against_file(file)

    def has_one_of(self, hashes: Dict[str, str]) -> bool:
        """Return whether any of the given hashes are allowed."""
        for hash_name, hex_digest in hashes.items():
            if self.is_hash_allowed(hash_name, hex_digest):
                return True
        return False

    def __bool__(self) -> bool:
        """Return whether I know any known-good hashes."""
        return bool(self._allowed)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Hashes):
            return NotImplemented
        return self._allowed == other._allowed

    def __hash__(self) -> int:
        # Hash over the sorted "alg:digest" pairs; since __init__ stores
        # sorted digest lists, equal Hashes objects hash equal.
        return hash(
            ",".join(
                sorted(
                    ":".join((alg, digest))
                    for alg, digest_list in self._allowed.items()
                    for digest in digest_list
                )
            )
        )
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
class MissingHashes(Hashes):
    """A workalike for Hashes used when we're missing a hash for a requirement

    It computes the actual hash of the requirement and raises a HashMissing
    exception showing it to the user.

    """

    def __init__(self) -> None:
        """Don't offer the ``hashes`` kwarg."""
        # Pass our favorite hash in to generate a "gotten hash". With the
        # empty list, it will never match, so an error will always raise.
        super().__init__(hashes={FAVORITE_HASH: []})

    def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
        # Report the digest that was actually computed so the user can pin it.
        raise HashMissing(gots[FAVORITE_HASH].hexdigest())
|
llava/lib/python3.10/site-packages/pip/_internal/utils/logging.py
ADDED
|
@@ -0,0 +1,354 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import errno
|
| 3 |
+
import logging
|
| 4 |
+
import logging.handlers
|
| 5 |
+
import os
|
| 6 |
+
import sys
|
| 7 |
+
import threading
|
| 8 |
+
from dataclasses import dataclass
|
| 9 |
+
from io import TextIOWrapper
|
| 10 |
+
from logging import Filter
|
| 11 |
+
from typing import Any, ClassVar, Generator, List, Optional, TextIO, Type
|
| 12 |
+
|
| 13 |
+
from pip._vendor.rich.console import (
|
| 14 |
+
Console,
|
| 15 |
+
ConsoleOptions,
|
| 16 |
+
ConsoleRenderable,
|
| 17 |
+
RenderableType,
|
| 18 |
+
RenderResult,
|
| 19 |
+
RichCast,
|
| 20 |
+
)
|
| 21 |
+
from pip._vendor.rich.highlighter import NullHighlighter
|
| 22 |
+
from pip._vendor.rich.logging import RichHandler
|
| 23 |
+
from pip._vendor.rich.segment import Segment
|
| 24 |
+
from pip._vendor.rich.style import Style
|
| 25 |
+
|
| 26 |
+
from pip._internal.utils._log import VERBOSE, getLogger
|
| 27 |
+
from pip._internal.utils.compat import WINDOWS
|
| 28 |
+
from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
|
| 29 |
+
from pip._internal.utils.misc import ensure_dir
|
| 30 |
+
|
| 31 |
+
# Thread-local storage for the current log-indentation level (see indent_log).
_log_state = threading.local()
# Dedicated logger for output captured from subprocesses.
subprocess_logger = getLogger("pip.subprocessor")
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class BrokenStdoutLoggingError(Exception):
    """
    Raised if BrokenPipeError occurs for the stdout stream while logging.
    """
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def _is_broken_pipe_error(exc_class: Type[BaseException], exc: BaseException) -> bool:
|
| 42 |
+
if exc_class is BrokenPipeError:
|
| 43 |
+
return True
|
| 44 |
+
|
| 45 |
+
# On Windows, a broken pipe can show up as EINVAL rather than EPIPE:
|
| 46 |
+
# https://bugs.python.org/issue19612
|
| 47 |
+
# https://bugs.python.org/issue30418
|
| 48 |
+
if not WINDOWS:
|
| 49 |
+
return False
|
| 50 |
+
|
| 51 |
+
return isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
@contextlib.contextmanager
def indent_log(num: int = 2) -> Generator[None, None, None]:
    """
    A context manager which will cause the log output to be indented for any
    log messages emitted inside it.
    """
    # For thread-safety
    _log_state.indentation = get_indentation()
    _log_state.indentation += num
    try:
        yield
    finally:
        # Restore the previous indentation level even if the body raised.
        _log_state.indentation -= num
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def get_indentation() -> int:
    """Return the current log-indentation level for this thread."""
    # Defaults to 0 when indent_log() has never run in this thread.
    return getattr(_log_state, "indentation", 0)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class IndentingFormatter(logging.Formatter):
    # ISO-8601-style timestamps.
    default_time_format = "%Y-%m-%dT%H:%M:%S"

    def __init__(
        self,
        *args: Any,
        add_timestamp: bool = False,
        **kwargs: Any,
    ) -> None:
        """
        A logging.Formatter that obeys the indent_log() context manager.

        :param add_timestamp: A bool indicating output lines should be prefixed
            with their record's timestamp.
        """
        self.add_timestamp = add_timestamp
        super().__init__(*args, **kwargs)

    def get_message_start(self, formatted: str, levelno: int) -> str:
        """
        Return the start of the formatted log message (not counting the
        prefix to add to each line).
        """
        if levelno < logging.WARNING:
            return ""
        if formatted.startswith(DEPRECATION_MSG_PREFIX):
            # Then the message already has a prefix. We don't want it to
            # look like "WARNING: DEPRECATION: ...."
            return ""
        if levelno < logging.ERROR:
            return "WARNING: "

        return "ERROR: "

    def format(self, record: logging.LogRecord) -> str:
        """
        Calls the standard formatter, but will indent all of the log message
        lines by our current indentation level.
        """
        formatted = super().format(record)
        message_start = self.get_message_start(formatted, record.levelno)
        formatted = message_start + formatted

        prefix = ""
        if self.add_timestamp:
            prefix = f"{self.formatTime(record)} "
        prefix += " " * get_indentation()
        # splitlines(True) keeps the newlines so the prefix lands on every
        # line of a multi-line message.
        formatted = "".join([prefix + line for line in formatted.splitlines(True)])
        return formatted
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
@dataclass
class IndentedRenderable:
    # Wraps any rich renderable and prefixes each rendered line with
    # ``indent`` spaces, so rich output honours indent_log().
    renderable: RenderableType
    indent: int

    def __rich_console__(
        self, console: Console, options: ConsoleOptions
    ) -> RenderResult:
        segments = console.render(self.renderable, options)
        lines = Segment.split_lines(segments)
        for line in lines:
            yield Segment(" " * self.indent)
            yield from line
            yield Segment("\n")
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
class PipConsole(Console):
    """Rich Console subclass that restores pip's broken-pipe handling."""

    def on_broken_pipe(self) -> None:
        # Reraise the original exception, rich 13.8.0+ exits by default
        # instead, preventing our handler from firing.
        raise BrokenPipeError() from None
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
class RichPipStreamHandler(RichHandler):
    # Disable rich's keyword highlighting entirely for pip's output.
    KEYWORDS: ClassVar[Optional[List[str]]] = []

    def __init__(self, stream: Optional[TextIO], no_color: bool) -> None:
        # Route output through PipConsole and suppress rich's extra
        # columns (time/level/path) — pip formats those itself.
        super().__init__(
            console=PipConsole(file=stream, no_color=no_color, soft_wrap=True),
            show_time=False,
            show_level=False,
            show_path=False,
            highlighter=NullHighlighter(),
        )

    # Our custom override on Rich's logger, to make things work as we need them to.
    def emit(self, record: logging.LogRecord) -> None:
        style: Optional[Style] = None

        # If we are given a diagnostic error to present, present it with indentation.
        # The "rich" marker attribute signals that record.args holds a single
        # rich renderable instead of normal %-format arguments.
        if getattr(record, "rich", False):
            assert isinstance(record.args, tuple)
            (rich_renderable,) = record.args
            assert isinstance(
                rich_renderable, (ConsoleRenderable, RichCast, str)
            ), f"{rich_renderable} is not rich-console-renderable"

            renderable: RenderableType = IndentedRenderable(
                rich_renderable, indent=get_indentation()
            )
        else:
            # Normal log record: format text, then colorize by severity.
            message = self.format(record)
            renderable = self.render_message(record, message)
            if record.levelno is not None:
                if record.levelno >= logging.ERROR:
                    style = Style(color="red")
                elif record.levelno >= logging.WARNING:
                    style = Style(color="yellow")

        try:
            # overflow="ignore"/crop=False: never truncate pip's output.
            self.console.print(renderable, overflow="ignore", crop=False, style=style)
        except Exception:
            self.handleError(record)

    def handleError(self, record: logging.LogRecord) -> None:
        """Called when logging is unable to log some output."""

        exc_class, exc = sys.exc_info()[:2]
        # If a broken pipe occurred while calling write() or flush() on the
        # stdout stream in logging's Handler.emit(), then raise our special
        # exception so we can handle it in main() instead of logging the
        # broken pipe error and continuing.
        if (
            exc_class
            and exc
            and self.console.file is sys.stdout
            and _is_broken_pipe_error(exc_class, exc)
        ):
            raise BrokenStdoutLoggingError()

        return super().handleError(record)
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
    """RotatingFileHandler that creates the log directory on demand."""

    def _open(self) -> TextIOWrapper:
        # Make sure the parent directory exists before the base class
        # attempts to open the log file.
        ensure_dir(os.path.dirname(self.baseFilename))
        return super()._open()
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
class MaxLevelFilter(Filter):
    """A logging Filter that passes only records strictly below a level."""

    def __init__(self, level: int) -> None:
        self.level = level

    def filter(self, record: logging.LogRecord) -> bool:
        # Strictly-below comparison: records at exactly `level` are dropped.
        return record.levelno < self.level
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
class ExcludeLoggerFilter(Filter):
    """
    A logging Filter that excludes records from a logger (or its children).
    """

    def filter(self, record: logging.LogRecord) -> bool:
        # logging.Filter *allows* only records from the named logger (or
        # its children); inverting it excludes them instead.
        return not super().filter(record)
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str]) -> int:
    """Configures and sets up all of the logging

    Returns the requested logging level, as its integer value.
    """

    # Determine the level to be logging at.
    # VERBOSE is presumably pip's custom level between DEBUG and INFO —
    # defined elsewhere in this package; confirm against _log.py.
    if verbosity >= 2:
        level_number = logging.DEBUG
    elif verbosity == 1:
        level_number = VERBOSE
    elif verbosity == -1:
        level_number = logging.WARNING
    elif verbosity == -2:
        level_number = logging.ERROR
    elif verbosity <= -3:
        level_number = logging.CRITICAL
    else:
        level_number = logging.INFO

    level = logging.getLevelName(level_number)

    # The "root" logger should match the "console" level *unless* we also need
    # to log to a user log file.
    include_user_log = user_log_file is not None
    if include_user_log:
        additional_log_file = user_log_file
        root_level = "DEBUG"
    else:
        # The user_log handler is still configured, but pointed at the
        # null device so it never produces output.
        additional_log_file = "/dev/null"
        root_level = level

    # Disable any logging besides WARNING unless we have DEBUG level logging
    # enabled for vendored libraries.
    vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"

    # Shorthands for clarity
    log_streams = {
        "stdout": "ext://sys.stdout",
        "stderr": "ext://sys.stderr",
    }
    handler_classes = {
        "stream": "pip._internal.utils.logging.RichPipStreamHandler",
        "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
    }
    # "user_log" is only attached when the user asked for a log file.
    handlers = ["console", "console_errors", "console_subprocess"] + (
        ["user_log"] if include_user_log else []
    )

    logging.config.dictConfig(
        {
            "version": 1,
            "disable_existing_loggers": False,
            "filters": {
                # Keeps sub-WARNING records on stdout; WARNING+ go to the
                # stderr handler instead.
                "exclude_warnings": {
                    "()": "pip._internal.utils.logging.MaxLevelFilter",
                    "level": logging.WARNING,
                },
                # Only records from the subprocess logger.
                "restrict_to_subprocess": {
                    "()": "logging.Filter",
                    "name": subprocess_logger.name,
                },
                # Everything except the subprocess logger.
                "exclude_subprocess": {
                    "()": "pip._internal.utils.logging.ExcludeLoggerFilter",
                    "name": subprocess_logger.name,
                },
            },
            "formatters": {
                "indent": {
                    "()": IndentingFormatter,
                    "format": "%(message)s",
                },
                "indent_with_timestamp": {
                    "()": IndentingFormatter,
                    "format": "%(message)s",
                    "add_timestamp": True,
                },
            },
            "handlers": {
                # Normal output below WARNING, to stdout.
                "console": {
                    "level": level,
                    "class": handler_classes["stream"],
                    "no_color": no_color,
                    "stream": log_streams["stdout"],
                    "filters": ["exclude_subprocess", "exclude_warnings"],
                    "formatter": "indent",
                },
                # WARNING and above, to stderr.
                "console_errors": {
                    "level": "WARNING",
                    "class": handler_classes["stream"],
                    "no_color": no_color,
                    "stream": log_streams["stderr"],
                    "filters": ["exclude_subprocess"],
                    "formatter": "indent",
                },
                # A handler responsible for logging to the console messages
                # from the "subprocessor" logger.
                "console_subprocess": {
                    "level": level,
                    "class": handler_classes["stream"],
                    "stream": log_streams["stderr"],
                    "no_color": no_color,
                    "filters": ["restrict_to_subprocess"],
                    "formatter": "indent",
                },
                # Full DEBUG log to the user-specified file (or /dev/null).
                # delay=True defers opening the file until first write.
                "user_log": {
                    "level": "DEBUG",
                    "class": handler_classes["file"],
                    "filename": additional_log_file,
                    "encoding": "utf-8",
                    "delay": True,
                    "formatter": "indent_with_timestamp",
                },
            },
            "root": {
                "level": root_level,
                "handlers": handlers,
            },
            "loggers": {"pip._vendor": {"level": vendored_log_level}},
        }
    )

    return level_number
|
llava/lib/python3.10/site-packages/pip/_internal/utils/misc.py
ADDED
|
@@ -0,0 +1,773 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import errno
|
| 2 |
+
import getpass
|
| 3 |
+
import hashlib
|
| 4 |
+
import logging
|
| 5 |
+
import os
|
| 6 |
+
import posixpath
|
| 7 |
+
import shutil
|
| 8 |
+
import stat
|
| 9 |
+
import sys
|
| 10 |
+
import sysconfig
|
| 11 |
+
import urllib.parse
|
| 12 |
+
from dataclasses import dataclass
|
| 13 |
+
from functools import partial
|
| 14 |
+
from io import StringIO
|
| 15 |
+
from itertools import filterfalse, tee, zip_longest
|
| 16 |
+
from pathlib import Path
|
| 17 |
+
from types import FunctionType, TracebackType
|
| 18 |
+
from typing import (
|
| 19 |
+
Any,
|
| 20 |
+
BinaryIO,
|
| 21 |
+
Callable,
|
| 22 |
+
Generator,
|
| 23 |
+
Iterable,
|
| 24 |
+
Iterator,
|
| 25 |
+
List,
|
| 26 |
+
Mapping,
|
| 27 |
+
Optional,
|
| 28 |
+
Sequence,
|
| 29 |
+
TextIO,
|
| 30 |
+
Tuple,
|
| 31 |
+
Type,
|
| 32 |
+
TypeVar,
|
| 33 |
+
Union,
|
| 34 |
+
cast,
|
| 35 |
+
)
|
| 36 |
+
|
| 37 |
+
from pip._vendor.packaging.requirements import Requirement
|
| 38 |
+
from pip._vendor.pyproject_hooks import BuildBackendHookCaller
|
| 39 |
+
|
| 40 |
+
from pip import __version__
|
| 41 |
+
from pip._internal.exceptions import CommandError, ExternallyManagedEnvironment
|
| 42 |
+
from pip._internal.locations import get_major_minor_version
|
| 43 |
+
from pip._internal.utils.compat import WINDOWS
|
| 44 |
+
from pip._internal.utils.retry import retry
|
| 45 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
| 46 |
+
|
| 47 |
+
__all__ = [
|
| 48 |
+
"rmtree",
|
| 49 |
+
"display_path",
|
| 50 |
+
"backup_dir",
|
| 51 |
+
"ask",
|
| 52 |
+
"splitext",
|
| 53 |
+
"format_size",
|
| 54 |
+
"is_installable_dir",
|
| 55 |
+
"normalize_path",
|
| 56 |
+
"renames",
|
| 57 |
+
"get_prog",
|
| 58 |
+
"ensure_dir",
|
| 59 |
+
"remove_auth_from_url",
|
| 60 |
+
"check_externally_managed",
|
| 61 |
+
"ConfiguredBuildBackendHookCaller",
|
| 62 |
+
]
|
| 63 |
+
|
| 64 |
+
logger = logging.getLogger(__name__)
|
| 65 |
+
|
| 66 |
+
T = TypeVar("T")
|
| 67 |
+
ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType]
|
| 68 |
+
VersionInfo = Tuple[int, int, int]
|
| 69 |
+
NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
|
| 70 |
+
OnExc = Callable[[FunctionType, Path, BaseException], Any]
|
| 71 |
+
OnErr = Callable[[FunctionType, Path, ExcInfo], Any]
|
| 72 |
+
|
| 73 |
+
FILE_CHUNK_SIZE = 1024 * 1024
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def get_pip_version() -> str:
    """Return pip's version banner: version, install location, and Python
    major.minor version."""
    # __file__ is .../pip/_internal/utils/misc.py; two levels up is the
    # `pip` package directory.
    pkg_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
    return f"pip {__version__} from {pkg_dir} (python {get_major_minor_version()})"
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]:
    """
    Convert a tuple of ints representing a Python version to one of length
    three.

    :param py_version_info: a tuple of ints representing a Python version;
        the tuple can have any length.

    :return: a tuple of exactly three ints — missing components are padded
        with zeros and extra components are dropped.
    """
    # Pad with zeros, then truncate: handles all three length cases at once.
    padded = tuple(py_version_info) + (0, 0, 0)
    return cast("VersionInfo", padded[:3])
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def ensure_dir(path: str) -> None:
    """os.makedirs without raising when the directory already exists.

    :param path: directory path to create (including parents).
    :raises OSError: on any failure other than the directory already
        existing (EEXIST), or Windows' spurious ENOTEMPTY (see #6426).
    """
    try:
        os.makedirs(path)
    except OSError as e:
        # Windows can raise spurious ENOTEMPTY errors. See #6426.
        if e.errno not in (errno.EEXIST, errno.ENOTEMPTY):
            raise
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def get_prog() -> str:
    """Return the program name for usage/help messages.

    Falls back to "pip" if sys.argv cannot be inspected, and to the
    "<python> -m pip" form when pip was invoked as a module or with -c.
    """
    try:
        basename = os.path.basename(sys.argv[0])
        if basename in ("__main__.py", "-c"):
            return f"{sys.executable} -m pip"
        return basename
    except (AttributeError, TypeError, IndexError):
        pass
    return "pip"
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
# Retry every half second for up to 3 seconds
@retry(stop_after_delay=3, wait=0.5)
def rmtree(
    dir: str, ignore_errors: bool = False, onexc: Optional[OnExc] = None
) -> None:
    """Remove a directory tree, retrying transient failures.

    :param ignore_errors: when True, all removal errors are swallowed.
    :param onexc: callback invoked for each failure; defaults to re-raising.
    """
    if ignore_errors:
        onexc = _onerror_ignore
    elif onexc is None:
        onexc = _onerror_reraise
    handler: OnErr = partial(rmtree_errorhandler, onexc=onexc)
    if sys.version_info >= (3, 12):
        # 3.12 renamed the callback parameter; see
        # https://docs.python.org/3.12/whatsnew/3.12.html#shutil.
        shutil.rmtree(dir, onexc=handler)  # type: ignore
    else:
        shutil.rmtree(dir, onerror=handler)  # type: ignore
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def _onerror_ignore(*_args: Any) -> None:
|
| 142 |
+
pass
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def _onerror_reraise(*_args: Any) -> None:
|
| 146 |
+
raise # noqa: PLE0704 - Bare exception used to reraise existing exception
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
def rmtree_errorhandler(
    func: FunctionType,
    path: Path,
    exc_info: Union[ExcInfo, BaseException],
    *,
    onexc: OnExc = _onerror_reraise,
) -> None:
    """
    `rmtree` error handler that forces removal, like `rm -f`.

    A read-only file gets its write bit set and the failed operation is
    retried once; if removal still fails, *onexc* (the caller's original
    error callback) is invoked with the exception.
    """
    try:
        mode = os.stat(path).st_mode
    except OSError:
        # Path is gone (equivalent to an os.path.exists check): done.
        return

    if not mode & stat.S_IWRITE:
        # Make the entry writable, then retry the failed operation.
        try:
            os.chmod(path, mode | stat.S_IWRITE)
        except OSError:
            pass
        else:
            try:
                func(path)
                return
            except OSError:
                pass

    # Normalize the (exc_type, exc, tb) triple to a bare exception before
    # handing it to the callback.
    if not isinstance(exc_info, BaseException):
        _, exc_info, _ = exc_info
    onexc(func, path, exc_info)
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
def display_path(path: str) -> str:
    """Return *path* for display, rewritten relative to the current
    working directory when it lies inside it."""
    normalized = os.path.normcase(os.path.abspath(path))
    cwd = os.getcwd()
    if normalized.startswith(cwd + os.path.sep):
        normalized = "." + normalized[len(cwd):]
    return normalized
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
def backup_dir(dir: str, ext: str = ".bak") -> str:
    """Pick a non-existing backup name for *dir* by appending .bak,
    .bak2, .bak3, ... until a free name is found."""
    candidate = dir + ext
    counter = 1
    while os.path.exists(candidate):
        counter += 1
        candidate = f"{dir}{ext}{counter}"
    return candidate
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
def ask_path_exists(message: str, options: Iterable[str]) -> str:
    """Like ask(), but honour a pre-chosen answer from $PIP_EXISTS_ACTION."""
    preset_actions = os.environ.get("PIP_EXISTS_ACTION", "").split()
    for action in preset_actions:
        if action in options:
            return action
    return ask(message, options)
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
def _check_no_input(message: str) -> None:
|
| 218 |
+
"""Raise an error if no input is allowed."""
|
| 219 |
+
if os.environ.get("PIP_NO_INPUT"):
|
| 220 |
+
raise Exception(
|
| 221 |
+
f"No input was expected ($PIP_NO_INPUT set); question: {message}"
|
| 222 |
+
)
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
def ask(message: str, options: Iterable[str]) -> str:
    """Prompt interactively until the user enters one of *options*."""
    while True:
        _check_no_input(message)
        answer = input(message).strip().lower()
        if answer in options:
            return answer
        print(
            f"Your response ({answer!r}) was not one of the expected responses: "
            f"{', '.join(options)}"
        )
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
def ask_input(message: str) -> str:
    """Prompt for free-form input (refused when $PIP_NO_INPUT is set)."""
    _check_no_input(message)
    return input(message)
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
def ask_password(message: str) -> str:
    """Prompt for a password without echo (refused when $PIP_NO_INPUT is set)."""
    _check_no_input(message)
    return getpass.getpass(message)
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
def strtobool(val: str) -> int:
    """Convert a string representation of truth to true (1) or false (0).

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
    'val' is anything else.
    """
    val = val.lower()
    lookup = {
        "y": 1, "yes": 1, "t": 1, "true": 1, "on": 1, "1": 1,
        "n": 0, "no": 0, "f": 0, "false": 0, "off": 0, "0": 0,
    }
    if val in lookup:
        return lookup[val]
    raise ValueError(f"invalid truth value {val!r}")
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
def format_size(bytes: float) -> str:
    """Render a byte count as a human-readable decimal (1000-based) size."""
    if bytes > 1000 * 1000:
        return f"{bytes / 1000.0 / 1000:.1f} MB"
    if bytes > 10 * 1000:
        return f"{int(bytes / 1000)} kB"
    if bytes > 1000:
        return f"{bytes / 1000.0:.1f} kB"
    return f"{int(bytes)} bytes"
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]:
    """Return a list of formatted rows and a list of column sizes.

    For example::

        >>> tabulate([['foobar', 2000], [0xdeadbeef]])
        (['foobar     2000', '3735928559'], [10, 4])
    """
    str_rows = [tuple(str(cell) for cell in row) for row in rows]
    # Column width = widest cell in that column; short rows contribute "".
    widths = [
        max(len(cell) for cell in column)
        for column in zip_longest(*str_rows, fillvalue="")
    ]
    table = [
        " ".join(cell.ljust(width) for cell, width in zip(row, widths)).rstrip()
        for row in str_rows
    ]
    return table, widths
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
def is_installable_dir(path: str) -> bool:
    """Is path is a directory containing pyproject.toml or setup.py?

    If pyproject.toml exists, this is a PEP 517 project. Otherwise we look for
    a legacy setuptools layout by identifying setup.py. We don't check for the
    setup.cfg because using it without setup.py is only available for PEP 517
    projects, which are already covered by the pyproject.toml check.
    """
    if not os.path.isdir(path):
        return False
    return any(
        os.path.isfile(os.path.join(path, marker))
        for marker in ("pyproject.toml", "setup.py")
    )
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
def read_chunks(
    file: BinaryIO, size: int = FILE_CHUNK_SIZE
) -> Generator[bytes, None, None]:
    """Yield successive *size*-byte chunks from *file* until EOF."""
    # An empty read signals end-of-file.
    while chunk := file.read(size):
        yield chunk
|
| 319 |
+
|
| 320 |
+
|
| 321 |
+
def normalize_path(path: str, resolve_symlinks: bool = True) -> str:
    """
    Convert a path to its canonical, case-normalized, absolute version.

    When *resolve_symlinks* is true, symlinks are resolved via realpath;
    otherwise the path is merely made absolute.
    """
    expanded = os.path.expanduser(path)
    if resolve_symlinks:
        canonical = os.path.realpath(expanded)
    else:
        canonical = os.path.abspath(expanded)
    return os.path.normcase(canonical)
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
def splitext(path: str) -> Tuple[str, str]:
    """Like os.path.splitext, but take off .tar too"""
    base, ext = posixpath.splitext(path)
    if base.lower().endswith(".tar"):
        # Fold the ".tar" back into the extension ("a.tar.gz" -> ".tar.gz").
        base, tar_part = base[:-4], base[-4:]
        ext = tar_part + ext
    return base, ext
|
| 341 |
+
|
| 342 |
+
|
| 343 |
+
def renames(old: str, new: str) -> None:
    """Like os.renames(), but handles renaming across devices."""
    # Create missing parents of the destination (borrowed from os.renames()).
    new_dir, new_name = os.path.split(new)
    if new_dir and new_name and not os.path.exists(new_dir):
        os.makedirs(new_dir)

    # shutil.move falls back to copy+delete when crossing filesystems.
    shutil.move(old, new)

    # Best-effort pruning of now-empty parents of the old location.
    old_dir, old_name = os.path.split(old)
    if old_dir and old_name:
        try:
            os.removedirs(old_dir)
        except OSError:
            pass
|
| 358 |
+
|
| 359 |
+
|
| 360 |
+
def is_local(path: str) -> bool:
    """
    Return True if path is within sys.prefix, if we're running in a virtualenv.

    If we're not in a virtualenv, all paths are considered "local."

    Caution: this function assumes the head of path has been normalized
    with normalize_path.
    """
    if running_under_virtualenv():
        return path.startswith(normalize_path(sys.prefix))
    return True
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
def write_output(msg: Any, *args: Any) -> None:
    """Emit user-facing output through the module logger at INFO level."""
    logger.info(msg, *args)
|
| 376 |
+
|
| 377 |
+
|
| 378 |
+
class StreamWrapper(StringIO):
    """An in-memory text stream that reports the encoding of the stream it
    was created from."""

    orig_stream: TextIO

    @classmethod
    def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper":
        """Build a wrapper remembering *orig_stream* for encoding lookups."""
        wrapper = cls()
        wrapper.orig_stream = orig_stream
        return wrapper

    # compileall.compile_dir() needs stdout.encoding to print to stdout
    # type ignore is because TextIOBase.encoding is writeable
    @property
    def encoding(self) -> str:  # type: ignore
        return self.orig_stream.encoding
|
| 392 |
+
|
| 393 |
+
|
| 394 |
+
# Simulates an enum
def enum(*sequential: Any, **named: Any) -> Type[Any]:
    """Build a simple enum-like class: positional names get 0..n-1,
    keyword names get their given values, and `reverse_mapping` maps
    values back to names."""
    members = {name: index for index, name in enumerate(sequential)}
    members.update(named)
    members["reverse_mapping"] = {value: key for key, value in members.items()}
    return type("Enum", (), members)
|
| 400 |
+
|
| 401 |
+
|
| 402 |
+
def build_netloc(host: str, port: Optional[int]) -> str:
    """
    Build a netloc from a host-port pair
    """
    if port is None:
        return host
    # An IPv6 literal must be bracketed before a port can be appended.
    bracketed_host = f"[{host}]" if ":" in host else host
    return f"{bracketed_host}:{port}"
|
| 412 |
+
|
| 413 |
+
|
| 414 |
+
def build_url_from_netloc(netloc: str, scheme: str = "https") -> str:
    """
    Build a full URL from a netloc.
    """
    looks_like_bare_ipv6 = (
        netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc
    )
    if looks_like_bare_ipv6:
        # It must be a bare IPv6 address, so wrap it with brackets.
        netloc = f"[{netloc}]"
    return f"{scheme}://{netloc}"
|
| 422 |
+
|
| 423 |
+
|
| 424 |
+
def parse_netloc(netloc: str) -> Tuple[Optional[str], Optional[int]]:
    """
    Return the host-port pair from a netloc.
    """
    # Let urllib do the parsing by first embedding the netloc in a URL.
    parsed = urllib.parse.urlparse(build_url_from_netloc(netloc))
    return parsed.hostname, parsed.port
|
| 431 |
+
|
| 432 |
+
|
| 433 |
+
def split_auth_from_netloc(netloc: str) -> NetlocTuple:
    """
    Parse out and remove the auth information from a netloc.

    Returns: (netloc, (username, password)).
    """
    if "@" not in netloc:
        return netloc, (None, None)

    # Split from the right because that's how urllib.parse.urlsplit()
    # behaves if more than one @ is present (which can be checked using
    # the password attribute of urlsplit()'s return value).
    auth, netloc = netloc.rsplit("@", 1)
    if ":" in auth:
        # Split from the left because that's how urllib.parse.urlsplit()
        # behaves if more than one : is present (which again can be checked
        # using the password attribute of the return value)
        user, _, raw_pw = auth.partition(":")
        pw: Optional[str] = urllib.parse.unquote(raw_pw)
    else:
        user, pw = auth, None

    return netloc, (urllib.parse.unquote(user), pw)
|
| 460 |
+
|
| 461 |
+
|
| 462 |
+
def redact_netloc(netloc: str) -> str:
    """
    Replace the sensitive data in a netloc with "****", if it exists.

    For example:
        - "user:pass@example.com" returns "user:****@example.com"
        - "accesstoken@example.com" returns "****@example.com"
    """
    netloc, (user, password) = split_auth_from_netloc(netloc)
    if user is None:
        return netloc
    if password is None:
        # A lone token: mask the user itself.
        user, masked = "****", ""
    else:
        user, masked = urllib.parse.quote(user), ":****"
    return f"{user}{masked}@{netloc}"
|
| 480 |
+
|
| 481 |
+
|
| 482 |
+
def _transform_url(
|
| 483 |
+
url: str, transform_netloc: Callable[[str], Tuple[Any, ...]]
|
| 484 |
+
) -> Tuple[str, NetlocTuple]:
|
| 485 |
+
"""Transform and replace netloc in a url.
|
| 486 |
+
|
| 487 |
+
transform_netloc is a function taking the netloc and returning a
|
| 488 |
+
tuple. The first element of this tuple is the new netloc. The
|
| 489 |
+
entire tuple is returned.
|
| 490 |
+
|
| 491 |
+
Returns a tuple containing the transformed url as item 0 and the
|
| 492 |
+
original tuple returned by transform_netloc as item 1.
|
| 493 |
+
"""
|
| 494 |
+
purl = urllib.parse.urlsplit(url)
|
| 495 |
+
netloc_tuple = transform_netloc(purl.netloc)
|
| 496 |
+
# stripped url
|
| 497 |
+
url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment)
|
| 498 |
+
surl = urllib.parse.urlunsplit(url_pieces)
|
| 499 |
+
return surl, cast("NetlocTuple", netloc_tuple)
|
| 500 |
+
|
| 501 |
+
|
| 502 |
+
def _get_netloc(netloc: str) -> NetlocTuple:
    """Adapter exposing split_auth_from_netloc() to _transform_url()."""
    return split_auth_from_netloc(netloc)
|
| 504 |
+
|
| 505 |
+
|
| 506 |
+
def _redact_netloc(netloc: str) -> Tuple[str]:
    """Adapter exposing redact_netloc() to _transform_url()."""
    return (redact_netloc(netloc),)
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
def split_auth_netloc_from_url(
    url: str,
) -> Tuple[str, str, Tuple[Optional[str], Optional[str]]]:
    """
    Parse a url into separate netloc, auth, and url with no auth.

    Returns: (url_without_auth, netloc, (username, password))
    """
    stripped_url, (netloc, credentials) = _transform_url(url, _get_netloc)
    return stripped_url, netloc, credentials
|
| 520 |
+
|
| 521 |
+
|
| 522 |
+
def remove_auth_from_url(url: str) -> str:
    """Return a copy of *url* with any 'username:password@' prefix dropped.

    Subversion receives credentials through command-line flags and does not
    recognize them in the URL, so they must be stripped out here.
    """
    stripped_url, _ = _transform_url(url, _get_netloc)
    return stripped_url
|
| 527 |
+
|
| 528 |
+
|
| 529 |
+
def redact_auth_from_url(url: str) -> str:
    """Return *url* with its password portion masked as ``****``."""
    redacted_url, _ = _transform_url(url, _redact_netloc)
    return redacted_url
|
| 532 |
+
|
| 533 |
+
|
| 534 |
+
def redact_auth_from_requirement(req: Requirement) -> str:
    """Render *req* as a string, masking any password in its URL."""
    if req.url:
        return str(req).replace(req.url, redact_auth_from_url(req.url))
    return str(req)
|
| 539 |
+
|
| 540 |
+
|
| 541 |
+
@dataclass(frozen=True)
class HiddenText:
    """A secret string paired with a redacted form used for safe display.

    ``str()`` (and therefore logging/formatting) yields only the redacted
    text, so a HiddenText never leaks the underlying secret by accident.
    """

    secret: str  # the real value, never displayed
    redacted: str  # the substitute shown to users/logs

    def __repr__(self) -> str:
        return f"<HiddenText {self.redacted!r}>"

    def __str__(self) -> str:
        # Always present the redacted form.
        return self.redacted

    # This is useful for testing: equality compares only the raw secret,
    # so two HiddenTexts with different redactions still compare equal.
    def __eq__(self, other: Any) -> bool:
        if type(other) is not type(self):
            return False
        return self.secret == other.secret
|
| 560 |
+
|
| 561 |
+
|
| 562 |
+
def hide_value(value: str) -> HiddenText:
    # Wrap *value* so it always displays as the literal "****".
    return HiddenText(value, redacted="****")
|
| 564 |
+
|
| 565 |
+
|
| 566 |
+
def hide_url(url: str) -> HiddenText:
    """Wrap *url* so any embedded password displays as ``****``."""
    return HiddenText(url, redacted=redact_auth_from_url(url))
|
| 569 |
+
|
| 570 |
+
|
| 571 |
+
def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None:
    """Guard pip.exe against in-place modification on Windows.

    A running pip.exe cannot replace itself, so any operation modifying pip
    must be launched as ``python -m pip ...`` instead.
    """
    major, minor = sys.version_info.major, sys.version_info.minor
    pip_names = ["pip", f"pip{major}", f"pip{major}.{minor}"]

    # See https://github.com/pypa/pip/issues/1299 for more discussion
    invoked_as_pip_exe = os.path.basename(sys.argv[0]) in pip_names
    if modifying_pip and WINDOWS and invoked_as_pip_exe:
        new_command = [sys.executable, "-m", "pip"] + sys.argv[1:]
        raise CommandError(
            "To modify pip, please run the following command:\n{}".format(
                " ".join(new_command)
            )
        )
|
| 595 |
+
|
| 596 |
+
|
| 597 |
+
def check_externally_managed() -> None:
    """Raise if this (non-virtualenv) environment is externally managed.

    The environment counts as externally managed when an
    ``EXTERNALLY-MANAGED`` config file exists in the stdlib directory, in
    which case ExternallyManagedEnvironment is raised.
    """
    if running_under_virtualenv():
        return
    marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED")
    if os.path.isfile(marker):
        raise ExternallyManagedEnvironment.from_config(marker)
|
| 610 |
+
|
| 611 |
+
|
| 612 |
+
def is_console_interactive() -> bool:
    """Return True when stdin exists and is attached to a terminal."""
    stdin = sys.stdin
    return stdin is not None and stdin.isatty()
|
| 615 |
+
|
| 616 |
+
|
| 617 |
+
def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]:
    """Return (hashlib.sha256 object, total byte count) for *path*."""
    hasher = hashlib.sha256()
    total = 0
    with open(path, "rb") as f:
        # Feed the file through in blocksize chunks to bound memory use.
        for chunk in read_chunks(f, size=blocksize):
            total += len(chunk)
            hasher.update(chunk)
    return hasher, total
|
| 627 |
+
|
| 628 |
+
|
| 629 |
+
def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]:
    """
    Yield consecutive non-overlapping pairs from *iterable*.

    For example:
        s -> (s0, s1), (s2, s3), (s4, s5), ...

    An odd trailing element is paired with None (zip_longest's fill value).
    Note this is NOT itertools.pairwise, which yields overlapping pairs.
    """
    it = iter(iterable)
    # Both zip arguments share the same iterator, so each emitted pair
    # consumes two consecutive items.
    return zip_longest(it, it)
|
| 638 |
+
|
| 639 |
+
|
| 640 |
+
def partition(
    pred: Callable[[T], bool], iterable: Iterable[T]
) -> Tuple[Iterable[T], Iterable[T]]:
    """
    Split *iterable* into (entries failing *pred*, entries passing it),
    like

        partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9
    """
    # tee lets both output iterators consume the source independently.
    falsy_src, truthy_src = tee(iterable)
    return filterfalse(pred, falsy_src), filter(pred, truthy_src)
|
| 651 |
+
|
| 652 |
+
|
| 653 |
+
class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
    """A BuildBackendHookCaller that injects config settings into every hook.

    Each overridden hook delegates to the base implementation but replaces
    ``config_settings`` with the value currently held by ``config_holder``,
    so hook callers need not thread the settings through themselves.
    """

    def __init__(
        self,
        config_holder: Any,
        source_dir: str,
        build_backend: str,
        backend_path: Optional[str] = None,
        runner: Optional[Callable[..., None]] = None,
        python_executable: Optional[str] = None,
    ):
        super().__init__(
            source_dir, build_backend, backend_path, runner, python_executable
        )
        # Object exposing a ``config_settings`` attribute; read afresh on
        # every hook call so later mutations are picked up.
        self.config_holder = config_holder

    def build_wheel(
        self,
        wheel_directory: str,
        config_settings: Optional[Mapping[str, Any]] = None,
        metadata_directory: Optional[str] = None,
    ) -> str:
        # NOTE: the ``config_settings`` parameter is deliberately ignored in
        # favor of the holder's value (same for every override below).
        cs = self.config_holder.config_settings
        return super().build_wheel(
            wheel_directory, config_settings=cs, metadata_directory=metadata_directory
        )

    def build_sdist(
        self,
        sdist_directory: str,
        config_settings: Optional[Mapping[str, Any]] = None,
    ) -> str:
        cs = self.config_holder.config_settings
        return super().build_sdist(sdist_directory, config_settings=cs)

    def build_editable(
        self,
        wheel_directory: str,
        config_settings: Optional[Mapping[str, Any]] = None,
        metadata_directory: Optional[str] = None,
    ) -> str:
        cs = self.config_holder.config_settings
        return super().build_editable(
            wheel_directory, config_settings=cs, metadata_directory=metadata_directory
        )

    def get_requires_for_build_wheel(
        self, config_settings: Optional[Mapping[str, Any]] = None
    ) -> Sequence[str]:
        cs = self.config_holder.config_settings
        return super().get_requires_for_build_wheel(config_settings=cs)

    def get_requires_for_build_sdist(
        self, config_settings: Optional[Mapping[str, Any]] = None
    ) -> Sequence[str]:
        cs = self.config_holder.config_settings
        return super().get_requires_for_build_sdist(config_settings=cs)

    def get_requires_for_build_editable(
        self, config_settings: Optional[Mapping[str, Any]] = None
    ) -> Sequence[str]:
        cs = self.config_holder.config_settings
        return super().get_requires_for_build_editable(config_settings=cs)

    def prepare_metadata_for_build_wheel(
        self,
        metadata_directory: str,
        config_settings: Optional[Mapping[str, Any]] = None,
        _allow_fallback: bool = True,
    ) -> str:
        cs = self.config_holder.config_settings
        return super().prepare_metadata_for_build_wheel(
            metadata_directory=metadata_directory,
            config_settings=cs,
            _allow_fallback=_allow_fallback,
        )

    def prepare_metadata_for_build_editable(
        self,
        metadata_directory: str,
        config_settings: Optional[Mapping[str, Any]] = None,
        _allow_fallback: bool = True,
    ) -> Optional[str]:
        cs = self.config_holder.config_settings
        return super().prepare_metadata_for_build_editable(
            metadata_directory=metadata_directory,
            config_settings=cs,
            _allow_fallback=_allow_fallback,
        )
|
| 741 |
+
|
| 742 |
+
|
| 743 |
+
def warn_if_run_as_root() -> None:
    """Emit a warning for root users on Unix running pip outside a virtualenv.

    In a virtual environment, sudo pip still writes to the virtualenv, and
    on Windows running pip as Administrator is unproblematic — so neither
    case warns.
    """
    if running_under_virtualenv():
        return
    # On Windows, there are no "system managed" Python packages. Installing as
    # Administrator via pip is the correct way of updating system environments.
    #
    # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
    # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
    if sys.platform == "win32" or sys.platform == "cygwin":
        return
    # Platforms without os.getuid cannot identify root; nothing to warn about.
    if not hasattr(os, "getuid"):
        return
    if os.getuid() != 0:
        return

    logger.warning(
        "Running pip as the 'root' user can result in broken permissions and "
        "conflicting behaviour with the system package manager, possibly "
        "rendering your system unusable. "
        "It is recommended to use a virtual environment instead: "
        "https://pip.pypa.io/warnings/venv. "
        "Use the --root-user-action option if you know what you are doing and "
        "want to suppress this warning."
    )
|
llava/lib/python3.10/site-packages/pip/_internal/utils/packaging.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import logging
|
| 3 |
+
import re
|
| 4 |
+
from typing import NewType, Optional, Tuple, cast
|
| 5 |
+
|
| 6 |
+
from pip._vendor.packaging import specifiers, version
|
| 7 |
+
from pip._vendor.packaging.requirements import Requirement
|
| 8 |
+
|
| 9 |
+
# Marker type: a str that has been normalized by safe_extra().
NormalizedExtra = NewType("NormalizedExtra", str)

logger = logging.getLogger(__name__)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
@functools.lru_cache(maxsize=32)
def check_requires_python(
    requires_python: Optional[str], version_info: Tuple[int, ...]
) -> bool:
    """
    Check if the given Python version matches a "Requires-Python" specifier.

    :param version_info: A 3-tuple of ints representing a Python
        major-minor-micro version to check (e.g. `sys.version_info[:3]`).

    :return: `True` if the given Python version satisfies the requirement.
        Otherwise, return `False`.

    :raises InvalidSpecifier: If `requires_python` has an invalid format.
    """
    if requires_python is None:
        # The package declares no constraint, so any interpreter is fine.
        return True
    specifier_set = specifiers.SpecifierSet(requires_python)
    candidate = version.parse(".".join(str(part) for part in version_info))
    return candidate in specifier_set
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@functools.lru_cache(maxsize=2048)
def get_requirement(req_string: str) -> Requirement:
    """Construct a packaging.Requirement object with caching.

    Parsing requirement strings is expensive and tends to occur with a low
    diversity of distinct inputs relative to the number of constructions,
    so caching avoids repeatedly re-parsing the same string into an
    equivalent Requirement object.
    """
    return Requirement(req_string)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def safe_extra(extra: str) -> NormalizedExtra:
    """Convert an arbitrary string to a standard 'extra' name.

    Runs of characters outside ``[A-Za-z0-9.-]`` collapse to a single '_',
    and the result is lowercased.

    This function is duplicated from ``pkg_resources``. Note that this is not
    the same as either ``canonicalize_name`` or ``_egg_link_name``.
    """
    normalized = re.sub("[^A-Za-z0-9.-]+", "_", extra).lower()
    return cast(NormalizedExtra, normalized)
|
llava/lib/python3.10/site-packages/pip/_internal/utils/retry.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
from time import perf_counter, sleep
|
| 3 |
+
from typing import Callable, TypeVar
|
| 4 |
+
|
| 5 |
+
from pip._vendor.typing_extensions import ParamSpec
|
| 6 |
+
|
| 7 |
+
# Return type of the wrapped callable.
T = TypeVar("T")
# Parameter specification of the wrapped callable.
P = ParamSpec("P")
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def retry(
    wait: float, stop_after_delay: float
) -> Callable[[Callable[P, T]], Callable[P, T]]:
    """Decorator to automatically retry a function on error.

    If the function raises, the function is recalled with the same arguments
    until it returns or the time limit is reached. When the time limit is
    surpassed, the last exception raised is reraised.

    :param wait: The time to wait after an error before retrying, in seconds.
    :param stop_after_delay: The time limit after which retries will cease,
        in seconds.
    """

    def wrapper(func: Callable[P, T]) -> Callable[P, T]:
        @functools.wraps(func)
        def retry_wrapped(*args: P.args, **kwargs: P.kwargs) -> T:
            # perf_counter is monotonic on all platforms we care about and
            # has much better resolution than time.monotonic().
            started = perf_counter()
            while True:
                try:
                    return func(*args, **kwargs)
                except Exception:
                    # Budget exhausted: let the latest exception propagate.
                    if perf_counter() - started > stop_after_delay:
                        raise
                    sleep(wait)

        return retry_wrapped

    return wrapper
|
llava/lib/python3.10/site-packages/pip/_internal/utils/subprocess.py
ADDED
|
@@ -0,0 +1,245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os
|
| 3 |
+
import shlex
|
| 4 |
+
import subprocess
|
| 5 |
+
from typing import Any, Callable, Iterable, List, Literal, Mapping, Optional, Union
|
| 6 |
+
|
| 7 |
+
from pip._vendor.rich.markup import escape
|
| 8 |
+
|
| 9 |
+
from pip._internal.cli.spinners import SpinnerInterface, open_spinner
|
| 10 |
+
from pip._internal.exceptions import InstallationSubprocessError
|
| 11 |
+
from pip._internal.utils.logging import VERBOSE, subprocess_logger
|
| 12 |
+
from pip._internal.utils.misc import HiddenText
|
| 13 |
+
|
| 14 |
+
# A command line: plain strings plus HiddenText for secret-bearing arguments.
CommandArgs = List[Union[str, HiddenText]]
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def make_command(*args: Union[str, HiddenText, CommandArgs]) -> CommandArgs:
    """
    Flatten *args* (strings, HiddenTexts, and lists of either) into a
    single CommandArgs list.
    """
    flattened: CommandArgs = []
    for arg in args:
        # isinstance checks list rather than CommandArgs since CommandArgs
        # exists only at type-checking time.
        if isinstance(arg, list):
            flattened.extend(arg)
        else:
            # A lone str or HiddenText becomes a single argument.
            flattened.append(arg)
    return flattened
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def format_command_args(args: Union[List[str], CommandArgs]) -> str:
    """
    Render a command line for display, shell-quoting each argument.
    """
    # HiddenText arguments are shown in their redacted form via str().
    # str() is deliberately NOT applied to non-HiddenText arguments: on
    # Python 2 that could raise UnicodeDecodeError for non-ascii unicode
    # arguments. (The type checker doesn't ensure the annotations are
    # correct in all cases.)
    quoted = (
        shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg)
        for arg in args
    )
    return " ".join(quoted)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
def reveal_command_args(args: Union[List[str], CommandArgs]) -> List[str]:
    """
    Expand any HiddenText arguments back into their raw secret strings.
    """
    revealed: List[str] = []
    for arg in args:
        revealed.append(arg.secret if isinstance(arg, HiddenText) else arg)
    return revealed
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def call_subprocess(
    cmd: Union[List[str], CommandArgs],
    show_stdout: bool = False,
    cwd: Optional[str] = None,
    on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",
    extra_ok_returncodes: Optional[Iterable[int]] = None,
    extra_environ: Optional[Mapping[str, Any]] = None,
    unset_environ: Optional[Iterable[str]] = None,
    spinner: Optional[SpinnerInterface] = None,
    log_failed_cmd: Optional[bool] = True,
    stdout_only: Optional[bool] = False,
    *,
    command_desc: str,
) -> str:
    """
    Run *cmd* in a subprocess, logging its output, and return that output.

    Args:
      show_stdout: if true, use INFO to log the subprocess's stderr and
        stdout streams. Otherwise, use DEBUG. Defaults to False.
      extra_ok_returncodes: an iterable of integer return codes that are
        acceptable, in addition to 0. Defaults to None, which means [].
      unset_environ: an iterable of environment variable names to unset
        prior to calling subprocess.Popen().
      log_failed_cmd: if false, failed commands are not logged, only raised.
      stdout_only: if true, return only stdout, else return both. When true,
        logging of both stdout and stderr occurs when the subprocess has
        terminated, else logging occurs as subprocess output is produced.
    """
    if extra_ok_returncodes is None:
        extra_ok_returncodes = []
    if unset_environ is None:
        unset_environ = []
    # Most places in pip use show_stdout=False. What this means is--
    #
    # - We connect the child's output (combined stderr and stdout) to a
    #   single pipe, which we read.
    # - We log this output to stderr at DEBUG level as it is received.
    # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't
    #   requested), then we show a spinner so the user can still see the
    #   subprocess is in progress.
    # - If the subprocess exits with an error, we log the output to stderr
    #   at ERROR level if it hasn't already been displayed to the console
    #   (e.g. if --verbose logging wasn't enabled).  This way we don't log
    #   the output to the console twice.
    #
    # If show_stdout=True, then the above is still done, but with DEBUG
    # replaced by INFO.
    if show_stdout:
        # Then log the subprocess output at INFO level.
        log_subprocess: Callable[..., None] = subprocess_logger.info
        used_level = logging.INFO
    else:
        # Then log the subprocess output using VERBOSE.  This also ensures
        # it will be logged to the log file (aka user_log), if enabled.
        log_subprocess = subprocess_logger.verbose
        used_level = VERBOSE

    # Whether the subprocess will be visible in the console.
    showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level

    # Only use the spinner if we're not showing the subprocess output
    # and we have a spinner.
    use_spinner = not showing_subprocess and spinner is not None

    log_subprocess("Running command %s", command_desc)
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)
    for name in unset_environ:
        env.pop(name, None)
    try:
        proc = subprocess.Popen(
            # Convert HiddenText objects to the underlying str.
            reveal_command_args(cmd),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE,
            cwd=cwd,
            env=env,
            errors="backslashreplace",
        )
    except Exception as exc:
        if log_failed_cmd:
            subprocess_logger.critical(
                "Error %s while executing command %s",
                exc,
                command_desc,
            )
        raise
    all_output = []
    if not stdout_only:
        assert proc.stdout
        assert proc.stdin
        proc.stdin.close()
        # In this mode, stdout and stderr are in the same pipe.
        while True:
            line: str = proc.stdout.readline()
            if not line:
                break
            line = line.rstrip()
            all_output.append(line + "\n")

            # Show the line immediately.
            log_subprocess(line)
            # Update the spinner.
            if use_spinner:
                assert spinner
                spinner.spin()
        try:
            proc.wait()
        finally:
            # Close the pipe even if wait() raises (e.g. KeyboardInterrupt).
            if proc.stdout:
                proc.stdout.close()
        output = "".join(all_output)
    else:
        # In this mode, stdout and stderr are in different pipes.
        # We must use communicate() which is the only safe way to read both.
        out, err = proc.communicate()
        # log line by line to preserve pip log indenting
        for out_line in out.splitlines():
            log_subprocess(out_line)
        all_output.append(out)
        for err_line in err.splitlines():
            log_subprocess(err_line)
        all_output.append(err)
        output = out

    # Non-zero exit is an error unless the caller whitelisted the code.
    proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes
    if use_spinner:
        assert spinner
        if proc_had_error:
            spinner.finish("error")
        else:
            spinner.finish("done")
    if proc_had_error:
        if on_returncode == "raise":
            error = InstallationSubprocessError(
                command_description=command_desc,
                exit_code=proc.returncode,
                # Only attach the captured output if it wasn't already shown.
                output_lines=all_output if not showing_subprocess else None,
            )
            if log_failed_cmd:
                subprocess_logger.error("%s", error, extra={"rich": True})
                subprocess_logger.verbose(
                    "[bold magenta]full command[/]: [blue]%s[/]",
                    escape(format_command_args(cmd)),
                    extra={"markup": True},
                )
                subprocess_logger.verbose(
                    "[bold magenta]cwd[/]: %s",
                    escape(cwd or "[inherit]"),
                    extra={"markup": True},
                )

            raise error
        elif on_returncode == "warn":
            subprocess_logger.warning(
                'Command "%s" had error code %s in %s',
                command_desc,
                proc.returncode,
                cwd,
            )
        elif on_returncode == "ignore":
            pass
        else:
            raise ValueError(f"Invalid value: on_returncode={on_returncode!r}")
    return output
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
def runner_with_spinner_message(message: str) -> Callable[..., None]:
    """Provide a subprocess_runner that shows a spinner message.

    Intended for use with for BuildBackendHookCaller. Thus, the runner has
    an API that matches what's expected by BuildBackendHookCaller.subprocess_runner.
    """

    def runner(
        cmd: List[str],
        cwd: Optional[str] = None,
        extra_environ: Optional[Mapping[str, Any]] = None,
    ) -> None:
        # *message* doubles as both the spinner text and the command
        # description used in logs/errors.
        with open_spinner(message) as spinner:
            call_subprocess(
                cmd,
                command_desc=message,
                cwd=cwd,
                extra_environ=extra_environ,
                spinner=spinner,
            )

    return runner
|
llava/lib/python3.10/site-packages/pip/_internal/utils/temp_dir.py
ADDED
|
@@ -0,0 +1,296 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import errno
|
| 2 |
+
import itertools
|
| 3 |
+
import logging
|
| 4 |
+
import os.path
|
| 5 |
+
import tempfile
|
| 6 |
+
import traceback
|
| 7 |
+
from contextlib import ExitStack, contextmanager
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
from typing import (
|
| 10 |
+
Any,
|
| 11 |
+
Callable,
|
| 12 |
+
Dict,
|
| 13 |
+
Generator,
|
| 14 |
+
List,
|
| 15 |
+
Optional,
|
| 16 |
+
TypeVar,
|
| 17 |
+
Union,
|
| 18 |
+
)
|
| 19 |
+
|
| 20 |
+
from pip._internal.utils.misc import enum, rmtree
|
| 21 |
+
|
| 22 |
+
logger = logging.getLogger(__name__)

# Bound TypeVar so TempDirectory.__enter__ can return the subclass type.
_T = TypeVar("_T", bound="TempDirectory")


# Kinds of temporary directories. Only needed for ones that are
# globally-managed.
tempdir_kinds = enum(
    BUILD_ENV="build-env",
    EPHEM_WHEEL_CACHE="ephem-wheel-cache",
    REQ_BUILD="req-build",
)


# ExitStack owning all globally-managed TempDirectories; populated only
# within a global_tempdir_manager() context.
_tempdir_manager: Optional[ExitStack] = None
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
@contextmanager
def global_tempdir_manager() -> Generator[None, None, None]:
    """Scope within which globally-managed TempDirectories are collected.

    TempDirectory objects created with globally_managed=True while this
    context is active are cleaned up when it exits; any previously active
    manager is restored afterwards.
    """
    global _tempdir_manager
    with ExitStack() as stack:
        previous = _tempdir_manager
        _tempdir_manager = stack
        try:
            yield
        finally:
            _tempdir_manager = previous
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class TempDirectoryTypeRegistry:
    """Tracks, per tempdir kind, whether directories should be auto-deleted."""

    def __init__(self) -> None:
        # kind -> auto-delete flag; unregistered kinds default to True.
        self._should_delete: Dict[str, bool] = {}

    def set_delete(self, kind: str, value: bool) -> None:
        """Record whether TempDirectories of *kind* should be auto-deleted."""
        self._should_delete[kind] = value

    def get_delete(self, kind: str) -> bool:
        """Return the auto-delete flag for *kind* (True when unconfigured)."""
        return self._should_delete.get(kind, True)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
# Active registry for the current tempdir_registry() scope, if any.
_tempdir_registry: Optional[TempDirectoryTypeRegistry] = None
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
@contextmanager
def tempdir_registry() -> Generator[TempDirectoryTypeRegistry, None, None]:
    """Provide a scoped global tempdir registry that dictates which kinds
    of temporary directory should be deleted on cleanup.
    """
    global _tempdir_registry
    previous = _tempdir_registry
    _tempdir_registry = TempDirectoryTypeRegistry()
    try:
        yield _tempdir_registry
    finally:
        _tempdir_registry = previous
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
class _Default:
    """Sentinel type distinguishing "argument not passed" from None."""


# Shared sentinel instance used as a default argument value.
_default = _Default()
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
class TempDirectory:
    """Helper class that owns and cleans up a temporary directory.

    This class can be used as a context manager or as an OO representation of a
    temporary directory.

    Attributes:
        path
            Location to the created temporary directory
        delete
            Whether the directory should be deleted when exiting
            (when used as a contextmanager)

    Methods:
        cleanup()
            Deletes the temporary directory

    When used as a context manager, if the delete attribute is True, on
    exiting the context the temporary directory is deleted.
    """

    def __init__(
        self,
        path: Optional[str] = None,
        delete: Union[bool, None, _Default] = _default,
        kind: str = "temp",
        globally_managed: bool = False,
        ignore_cleanup_errors: bool = True,
    ):
        super().__init__()

        # Resolve the _default sentinel: an explicit `path` means the caller
        # owns the directory (never delete); otherwise defer the decision to
        # the active tempdir registry at cleanup time (delete=None).
        if delete is _default:
            if path is not None:
                # If we were given an explicit directory, resolve delete option
                # now.
                delete = False
            else:
                # Otherwise, we wait until cleanup and see what
                # tempdir_registry says.
                delete = None

        # The only time we specify path is in for editables where it
        # is the value of the --src option.
        if path is None:
            path = self._create(kind)

        self._path = path
        self._deleted = False  # guards the `path` property after cleanup()
        self.delete = delete
        self.kind = kind
        self.ignore_cleanup_errors = ignore_cleanup_errors

        if globally_managed:
            # Register with the process-wide manager so this directory is
            # cleaned up at program teardown.
            # NOTE(review): `_tempdir_manager` is defined outside this view;
            # it appears to be an ExitStack-like object -- confirm upstream.
            assert _tempdir_manager is not None
            _tempdir_manager.enter_context(self)

    @property
    def path(self) -> str:
        # Fail loudly if anyone uses the path after cleanup() has run.
        assert not self._deleted, f"Attempted to access deleted path: {self._path}"
        return self._path

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self.path!r}>"

    def __enter__(self: _T) -> _T:
        return self

    def __exit__(self, exc: Any, value: Any, tb: Any) -> None:
        # `self.delete` is tri-state: True/False are explicit choices;
        # None means "consult the scoped registry", defaulting to True
        # when no registry is active.
        if self.delete is not None:
            delete = self.delete
        elif _tempdir_registry:
            delete = _tempdir_registry.get_delete(self.kind)
        else:
            delete = True

        if delete:
            self.cleanup()

    def _create(self, kind: str) -> str:
        """Create a temporary directory and return its canonical path."""
        # We realpath here because some systems have their default tmpdir
        # symlinked to another directory.  This tends to confuse build
        # scripts, so we canonicalize the path by traversing potential
        # symlinks here.
        path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
        logger.debug("Created temporary directory: %s", path)
        return path

    def cleanup(self) -> None:
        """Remove the temporary directory created and reset state"""
        self._deleted = True
        if not os.path.exists(self._path):
            return

        # Exceptions observed by `onerror` during the best-effort pass.
        errors: List[BaseException] = []

        def onerror(
            func: Callable[..., Any],
            path: Path,
            exc_val: BaseException,
        ) -> None:
            """Log a warning for a `rmtree` error and continue"""
            formatted_exc = "\n".join(
                traceback.format_exception_only(type(exc_val), exc_val)
            )
            formatted_exc = formatted_exc.rstrip()  # remove trailing new line
            if func in (os.unlink, os.remove, os.rmdir):
                logger.debug(
                    "Failed to remove a temporary file '%s' due to %s.\n",
                    path,
                    formatted_exc,
                )
            else:
                logger.debug("%s failed with %s.", func.__qualname__, formatted_exc)
            errors.append(exc_val)

        if self.ignore_cleanup_errors:
            try:
                # first try with @retry; retrying to handle ephemeral errors
                rmtree(self._path, ignore_errors=False)
            except OSError:
                # last pass ignore/log all errors
                rmtree(self._path, onexc=onerror)
                if errors:
                    logger.warning(
                        "Failed to remove contents in a temporary directory '%s'.\n"
                        "You can safely remove it manually.",
                        self._path,
                    )
        else:
            # Strict cleanup requested: let any OSError propagate.
            rmtree(self._path)
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
class AdjacentTempDirectory(TempDirectory):
    """Helper class that creates a temporary directory adjacent to a real one.

    Attributes:
        original
            The original directory to create a temp directory for.
        path
            After calling create() or entering, contains the full
            path to the temporary directory.
        delete
            Whether the directory should be deleted when exiting
            (when used as a contextmanager)

    """

    # The characters that may be used to name the temp directory
    # We always prepend a ~ and then rotate through these until
    # a usable name is found.
    # pkg_resources raises a different error for .dist-info folder
    # with leading '-' and invalid metadata
    LEADING_CHARS = "-~.=%0123456789"

    def __init__(self, original: str, delete: Optional[bool] = None) -> None:
        # Strip trailing separators so os.path.split() in _create() yields
        # the directory's own name rather than an empty final component.
        self.original = original.rstrip("/\\")
        super().__init__(delete=delete)

    @classmethod
    def _generate_names(cls, name: str) -> Generator[str, None, None]:
        """Generates a series of temporary names.

        The algorithm replaces the leading characters in the name
        with ones that are valid filesystem characters, but are not
        valid package names (for both Python and pip definitions of
        package).
        """
        # First pass: same-length candidates -- replace the first i
        # characters of `name` with '~' plus (i - 1) characters drawn
        # (with replacement) from LEADING_CHARS.
        for i in range(1, len(name)):
            for candidate in itertools.combinations_with_replacement(
                cls.LEADING_CHARS, i - 1
            ):
                new_name = "~" + "".join(candidate) + name[i:]
                if new_name != name:
                    yield new_name

        # If we make it this far, we will have to make a longer name
        for i in range(len(cls.LEADING_CHARS)):
            for candidate in itertools.combinations_with_replacement(
                cls.LEADING_CHARS, i
            ):
                new_name = "~" + "".join(candidate) + name
                if new_name != name:
                    yield new_name

    def _create(self, kind: str) -> str:
        """Create the temp directory next to ``self.original``, falling back
        to a regular system temp directory if every candidate name is taken.
        """
        root, name = os.path.split(self.original)
        for candidate in self._generate_names(name):
            path = os.path.join(root, candidate)
            try:
                os.mkdir(path)
            except OSError as ex:
                # Continue if the name exists already
                if ex.errno != errno.EEXIST:
                    raise
            else:
                path = os.path.realpath(path)
                break
        else:
            # Final fallback on the default behavior.
            path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))

        logger.debug("Created temporary directory: %s", path)
        return path
|
llava/lib/python3.10/site-packages/pip/_internal/utils/unpacking.py
ADDED
|
@@ -0,0 +1,337 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utilities related archives.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
import os
|
| 6 |
+
import shutil
|
| 7 |
+
import stat
|
| 8 |
+
import sys
|
| 9 |
+
import tarfile
|
| 10 |
+
import zipfile
|
| 11 |
+
from typing import Iterable, List, Optional
|
| 12 |
+
from zipfile import ZipInfo
|
| 13 |
+
|
| 14 |
+
from pip._internal.exceptions import InstallationError
|
| 15 |
+
from pip._internal.utils.filetypes import (
|
| 16 |
+
BZ2_EXTENSIONS,
|
| 17 |
+
TAR_EXTENSIONS,
|
| 18 |
+
XZ_EXTENSIONS,
|
| 19 |
+
ZIP_EXTENSIONS,
|
| 20 |
+
)
|
| 21 |
+
from pip._internal.utils.misc import ensure_dir
|
| 22 |
+
|
| 23 |
+
logger = logging.getLogger(__name__)


# Archive extensions this module can always unpack; bz2/xz variants are
# appended below only when the corresponding stdlib modules import cleanly.
SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS

try:
    import bz2  # noqa

    SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
except ImportError:
    logger.debug("bz2 module is not available")

try:
    # Only for Python 3.3+
    import lzma  # noqa

    SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
except ImportError:
    logger.debug("lzma module is not available")
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def current_umask() -> int:
    """Return the process umask.

    There is no read-only query for the umask, so it is read by setting it
    to 0 and immediately restoring the previous value.
    """
    original = os.umask(0)
    os.umask(original)
    return original
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def split_leading_dir(path: str) -> List[str]:
    """Split ``path`` into ``[first component, remainder]``.

    Splits on whichever separator ("/" or "\\") occurs first; when neither
    is present the remainder is the empty string.
    """
    path = path.lstrip("/").lstrip("\\")
    slash_at = path.find("/")
    backslash_at = path.find("\\")
    if slash_at != -1 and (backslash_at == -1 or slash_at < backslash_at):
        return path.split("/", 1)
    if backslash_at != -1:
        return path.split("\\", 1)
    return [path, ""]
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def has_leading_dir(paths: Iterable[str]) -> bool:
    """Return True when every path shares one leading directory component
    (i.e. the archive unpacks into a single top-level subdirectory)."""
    seen_prefix = None
    for path in paths:
        prefix, _rest = split_leading_dir(path)
        if not prefix:
            return False
        if seen_prefix is None:
            seen_prefix = prefix
        elif prefix != seen_prefix:
            return False
    return True
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def is_within_directory(directory: str, target: str) -> bool:
    """
    Return True if the absolute path of ``target`` is inside ``directory``
    (or is ``directory`` itself).

    Uses ``os.path.commonpath`` rather than ``os.path.commonprefix``: the
    latter compares character-by-character, so a sibling such as
    ``/base/foobar`` would be wrongly accepted for directory ``/base/foo``.
    """
    abs_directory = os.path.abspath(directory)
    abs_target = os.path.abspath(target)

    try:
        # commonpath operates on whole path components, not characters.
        return os.path.commonpath([abs_directory, abs_target]) == abs_directory
    except ValueError:
        # Raised for a mix of absolute/relative paths or, on Windows, paths
        # on different drives -- in either case target is not inside.
        return False
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def _get_default_mode_plus_executable() -> int:
    """Return the default file mode under the current umask, with all
    execute bits (user/group/world) set."""
    base_mode = 0o777 & ~current_umask()
    return base_mode | 0o111
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def set_extracted_file_to_default_mode_plus_executable(path: str) -> None:
    """
    Chmod the file at ``path`` to the default mode (honoring the umask)
    plus execute for user/group/world.

    (chmod +x) is no-op on windows per python docs
    """
    os.chmod(path, _get_default_mode_plus_executable())
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def zip_item_is_executable(info: ZipInfo) -> bool:
    """Return True if *info* describes a regular file with any execute bit
    (user, group, or world) set in its Unix mode."""
    # The upper 16 bits of external_attr hold the Unix mode bits.
    unix_mode = info.external_attr >> 16
    if not unix_mode:
        return False
    return bool(stat.S_ISREG(unix_mode) and unix_mode & 0o111)
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
def unzip_file(filename: str, location: str, flatten: bool = True) -> None:
    """
    Unzip the file (with path `filename`) to the destination `location`. All
    files are written based on system defaults and umask (i.e. permissions are
    not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied after being
    written. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.

    When ``flatten`` is true and every member shares one leading directory,
    that directory is stripped from the extracted paths.
    """
    ensure_dir(location)
    zipfp = open(filename, "rb")
    try:
        zip = zipfile.ZipFile(zipfp, allowZip64=True)
        leading = has_leading_dir(zip.namelist()) and flatten
        for info in zip.infolist():
            name = info.filename
            fn = name
            if leading:
                fn = split_leading_dir(name)[1]
            fn = os.path.join(location, fn)
            dir = os.path.dirname(fn)
            # Zip-slip guard: refuse members that would escape `location`.
            if not is_within_directory(location, fn):
                message = (
                    "The zip file ({}) has a file ({}) trying to install "
                    "outside target directory ({})"
                )
                raise InstallationError(message.format(filename, fn, location))
            if fn.endswith("/") or fn.endswith("\\"):
                # A directory
                ensure_dir(fn)
            else:
                ensure_dir(dir)
                # Don't use read() to avoid allocating an arbitrarily large
                # chunk of memory for the file's content
                fp = zip.open(name)
                try:
                    with open(fn, "wb") as destfp:
                        shutil.copyfileobj(fp, destfp)
                finally:
                    fp.close()
                if zip_item_is_executable(info):
                    set_extracted_file_to_default_mode_plus_executable(fn)
    finally:
        zipfp.close()
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def untar_file(filename: str, location: str) -> None:
    """
    Untar the file (with path `filename`) to the destination `location`.
    All files are written based on system defaults and umask (i.e. permissions
    are not preserved), except that regular file members with any execute
    permissions (user, group, or world) have "chmod +x" applied on top of the
    default. Note that for windows, any execute changes using os.chmod are
    no-ops per the python docs.
    """
    ensure_dir(location)
    # Pick a decompression mode from the extension; fall back to tarfile's
    # transparent autodetection ("r:*") when the extension is unknown.
    if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"):
        mode = "r:gz"
    elif filename.lower().endswith(BZ2_EXTENSIONS):
        mode = "r:bz2"
    elif filename.lower().endswith(XZ_EXTENSIONS):
        mode = "r:xz"
    elif filename.lower().endswith(".tar"):
        mode = "r"
    else:
        logger.warning(
            "Cannot determine compression type for file %s",
            filename,
        )
        mode = "r:*"

    tar = tarfile.open(filename, mode, encoding="utf-8")  # type: ignore
    try:
        leading = has_leading_dir([member.name for member in tar.getmembers()])

        # PEP 706 added `tarfile.data_filter`, and made some other changes to
        # Python's tarfile module (see below). The features were backported to
        # security releases.
        try:
            data_filter = tarfile.data_filter
        except AttributeError:
            # Older Python without extraction filters: extract member by
            # member with our own path-traversal checks.
            _untar_without_filter(filename, location, tar, leading)
        else:
            default_mode_plus_executable = _get_default_mode_plus_executable()

            if leading:
                # Strip the leading directory from all files in the archive,
                # including hardlink targets (which are relative to the
                # unpack location).
                for member in tar.getmembers():
                    name_lead, name_rest = split_leading_dir(member.name)
                    member.name = name_rest
                    if member.islnk():
                        lnk_lead, lnk_rest = split_leading_dir(member.linkname)
                        if lnk_lead == name_lead:
                            member.linkname = lnk_rest

            def pip_filter(member: tarfile.TarInfo, path: str) -> tarfile.TarInfo:
                """Extraction filter: sanitize each member via the stdlib
                data filter, then normalize its permission bits."""
                orig_mode = member.mode
                try:
                    try:
                        member = data_filter(member, location)
                    except tarfile.LinkOutsideDestinationError:
                        if sys.version_info[:3] in {
                            (3, 8, 17),
                            (3, 9, 17),
                            (3, 10, 12),
                            (3, 11, 4),
                        }:
                            # The tarfile filter in specific Python versions
                            # raises LinkOutsideDestinationError on valid input
                            # (https://github.com/python/cpython/issues/107845)
                            # Ignore the error there, but do use the
                            # more lax `tar_filter`
                            member = tarfile.tar_filter(member, location)
                        else:
                            raise
                except tarfile.TarError as exc:
                    message = "Invalid member in the tar file {}: {}"
                    # Filter error messages mention the member name.
                    # No need to add it here.
                    raise InstallationError(
                        message.format(
                            filename,
                            exc,
                        )
                    )
                # Regular files that had any execute bit keep "+x" on top of
                # the default mode; everything else gets the filter default.
                if member.isfile() and orig_mode & 0o111:
                    member.mode = default_mode_plus_executable
                else:
                    # See PEP 706 note above.
                    # The PEP changed this from `int` to `Optional[int]`,
                    # where None means "use the default". Mypy doesn't
                    # know this yet.
                    member.mode = None  # type: ignore [assignment]
                return member

            tar.extractall(location, filter=pip_filter)

    finally:
        tar.close()
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
def _untar_without_filter(
    filename: str,
    location: str,
    tar: tarfile.TarFile,
    leading: bool,
) -> None:
    """Fallback for Python without tarfile.data_filter"""
    for member in tar.getmembers():
        fn = member.name
        if leading:
            fn = split_leading_dir(fn)[1]
        path = os.path.join(location, fn)
        # Manual tar-slip guard: refuse members that would escape `location`.
        if not is_within_directory(location, path):
            message = (
                "The tar file ({}) has a file ({}) trying to install "
                "outside target directory ({})"
            )
            raise InstallationError(message.format(filename, path, location))
        if member.isdir():
            ensure_dir(path)
        elif member.issym():
            try:
                # NOTE(review): relies on the private TarFile._extract_member
                # API to create the symlink in place.
                tar._extract_member(member, path)
            except Exception as exc:
                # Some corrupt tar files seem to produce this
                # (specifically bad symlinks)
                logger.warning(
                    "In the tar file %s the member %s is invalid: %s",
                    filename,
                    member.name,
                    exc,
                )
                continue
        else:
            try:
                fp = tar.extractfile(member)
            except (KeyError, AttributeError) as exc:
                # Some corrupt tar files seem to produce this
                # (specifically bad symlinks)
                logger.warning(
                    "In the tar file %s the member %s is invalid: %s",
                    filename,
                    member.name,
                    exc,
                )
                continue
            ensure_dir(os.path.dirname(path))
            assert fp is not None
            with open(path, "wb") as destfp:
                shutil.copyfileobj(fp, destfp)
            fp.close()
            # Update the timestamp (useful for cython compiled files)
            tar.utime(member, path)
            # member have any execute permissions for user/group/world?
            if member.mode & 0o111:
                set_extracted_file_to_default_mode_plus_executable(path)
|
| 307 |
+
|
| 308 |
+
|
| 309 |
+
def unpack_file(
    filename: str,
    location: str,
    content_type: Optional[str] = None,
) -> None:
    """Unpack the archive at ``filename`` into ``location``.

    Dispatches to ``unzip_file`` or ``untar_file`` based on the declared
    content type, the file extension, or content sniffing.  Raises
    InstallationError when the archive format cannot be determined.
    """
    filename = os.path.realpath(filename)
    if (
        content_type == "application/zip"
        or filename.lower().endswith(ZIP_EXTENSIONS)
        or zipfile.is_zipfile(filename)
    ):
        # Wheels keep their internal layout; other zips are flattened past
        # any single leading directory.
        unzip_file(filename, location, flatten=not filename.endswith(".whl"))
    elif (
        content_type == "application/x-gzip"
        or tarfile.is_tarfile(filename)
        or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)
    ):
        untar_file(filename, location)
    else:
        # FIXME: handle?
        # FIXME: magic signatures?
        logger.critical(
            "Cannot unpack file %s (downloaded from %s, content-type: %s); "
            "cannot detect archive format",
            filename,
            location,
            content_type,
        )
        raise InstallationError(f"Cannot determine archive format of {location}")
|
llava/lib/python3.10/site-packages/pip/_internal/utils/urls.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import string
|
| 3 |
+
import urllib.parse
|
| 4 |
+
import urllib.request
|
| 5 |
+
|
| 6 |
+
from .compat import WINDOWS
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def path_to_url(path: str) -> str:
    """
    Convert a filesystem path to a ``file:`` URL.

    The path is made absolute and normalized, and its components are
    percent-quoted.
    """
    absolute = os.path.abspath(path)
    normalized = os.path.normpath(absolute)
    return urllib.parse.urljoin("file:", urllib.request.pathname2url(normalized))
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def url_to_path(url: str) -> str:
    """
    Convert a file: URL to a path.

    Raises ValueError for file: URLs with a non-local authority on
    non-Windows platforms; asserts the URL actually uses the file: scheme.
    """
    assert url.startswith(
        "file:"
    ), f"You can only turn file: urls into filenames (not {url!r})"

    _, netloc, path, _, _ = urllib.parse.urlsplit(url)

    if not netloc or netloc == "localhost":
        # According to RFC 8089, same as empty authority.
        netloc = ""
    elif WINDOWS:
        # If we have a UNC path, prepend UNC share notation.
        netloc = "\\\\" + netloc
    else:
        raise ValueError(
            f"non-local file URIs are not supported on this platform: {url!r}"
        )

    path = urllib.request.url2pathname(netloc + path)

    # On Windows, urlsplit parses the path as something like "/C:/Users/foo".
    # This creates issues for path-related functions like io.open(), so we try
    # to detect and strip the leading slash.
    if (
        WINDOWS
        and not netloc  # Not UNC.
        and len(path) >= 3
        and path[0] == "/"  # Leading slash to strip.
        and path[1] in string.ascii_letters  # Drive letter.
        and path[2:4] in (":", ":/")  # Colon + end of string, or colon + absolute path.
    ):
        path = path[1:]

    return path
|
llava/lib/python3.10/site-packages/pip/_internal/utils/virtualenv.py
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
import os
|
| 3 |
+
import re
|
| 4 |
+
import site
|
| 5 |
+
import sys
|
| 6 |
+
from typing import List, Optional
|
| 7 |
+
|
| 8 |
+
logger = logging.getLogger(__name__)
# Matches the pyvenv.cfg key that controls whether the venv can see the
# system site-packages (PEP 405).
_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(
    r"include-system-site-packages\s*=\s*(?P<value>true|false)"
)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def _running_under_venv() -> bool:
    """Detect PEP 405 virtual environments.

    In such environments ``sys.prefix`` differs from ``sys.base_prefix``.
    """
    base_prefix = getattr(sys, "base_prefix", sys.prefix)
    return base_prefix != sys.prefix
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def _running_under_legacy_virtualenv() -> bool:
    """Detect legacy (pypa/virtualenv < 20) environments.

    Those environments set ``sys.real_prefix``.
    """
    # pypa/virtualenv case
    try:
        sys.real_prefix  # type: ignore[attr-defined]
    except AttributeError:
        return False
    return True
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def running_under_virtualenv() -> bool:
    """True if we're running inside a virtual environment, False otherwise."""
    if _running_under_venv():
        return True
    return _running_under_legacy_virtualenv()
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def _get_pyvenv_cfg_lines() -> Optional[List[str]]:
    """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines

    Returns None, if it could not read/access the file.
    """
    pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg")
    try:
        # Although PEP 405 does not specify, the built-in venv module always
        # writes with UTF-8. (pypa/pip#8717)
        with open(pyvenv_cfg_file, encoding="utf-8") as f:
            return f.read().splitlines()  # avoids trailing newlines
    except OSError:
        return None
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def _no_global_under_venv() -> bool:
    """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion

    PEP 405 specifies that when system site-packages are not supposed to be
    visible from a virtual environment, `pyvenv.cfg` must contain the following
    line:

        include-system-site-packages = false

    Additionally, log a warning if accessing the file fails.
    """
    cfg_lines = _get_pyvenv_cfg_lines()
    if cfg_lines is None:
        # We're not in a "sane" venv, so assume there is no system
        # site-packages access (since that's PEP 405's default state).
        logger.warning(
            "Could not access 'pyvenv.cfg' despite a virtual environment "
            "being active. Assuming global site-packages is not accessible "
            "in this environment."
        )
        return True

    for line in cfg_lines:
        # `.match` anchors at the start of each line, so the key must
        # appear without leading whitespace to be recognized.
        match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)
        if match is not None and match.group("value") == "false":
            return True
    return False
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def _no_global_under_legacy_virtualenv() -> bool:
    """Check whether "no-global-site-packages.txt" exists beside site.py.

    This mirrors logic in pypa/virtualenv for determining whether system
    site-packages are visible in the virtual environment.
    """
    marker = os.path.join(
        os.path.dirname(os.path.abspath(site.__file__)),
        "no-global-site-packages.txt",
    )
    return os.path.exists(marker)
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def virtualenv_no_global() -> bool:
    """Returns a boolean, whether running in venv with no system site-packages."""
    # PEP 405 compliance needs to be checked first since virtualenv >=20 would
    # return True for both checks, but is only able to use the PEP 405 config.
    if _running_under_venv():
        return _no_global_under_venv()
    return (
        _running_under_legacy_virtualenv() and _no_global_under_legacy_virtualenv()
    )
|
llava/lib/python3.10/site-packages/pip/_internal/utils/wheel.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Support functions for working with wheel files.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
from email.message import Message
|
| 6 |
+
from email.parser import Parser
|
| 7 |
+
from typing import Tuple
|
| 8 |
+
from zipfile import BadZipFile, ZipFile
|
| 9 |
+
|
| 10 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 11 |
+
|
| 12 |
+
from pip._internal.exceptions import UnsupportedWheel
|
| 13 |
+
|
| 14 |
+
# Highest Wheel-Version (major, minor) this code is known to handle; see
# the wheel spec's Wheel-Version metadata field.
VERSION_COMPATIBLE = (1, 0)


logger = logging.getLogger(__name__)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]:
    """Extract information from the provided wheel, ensuring it meets basic
    standards.

    Returns the name of the .dist-info directory and the parsed WHEEL metadata.

    Raises UnsupportedWheel when the wheel is malformed or its Wheel-Version
    is incompatible.
    """
    try:
        info_dir = wheel_dist_info_dir(wheel_zip, name)
        metadata = wheel_metadata(wheel_zip, info_dir)
        version = wheel_version(metadata)
    except UnsupportedWheel as e:
        # Re-raise with the wheel's name prefixed for a clearer message.
        raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")

    check_compatibility(version, name)

    return info_dir, metadata
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def wheel_dist_info_dir(source: ZipFile, name: str) -> str:
    """Returns the name of the contained .dist-info directory.

    Raises AssertionError or UnsupportedWheel if not found, >1 found, or
    it doesn't match the provided name.
    """
    # Zip entries always use "/" as the separator, so the first path
    # component of each entry is its top-level directory.
    top_level = {entry.split("/", 1)[0] for entry in source.namelist()}

    matches = [d for d in top_level if d.endswith(".dist-info")]

    if not matches:
        raise UnsupportedWheel(".dist-info directory not found")

    if len(matches) > 1:
        joined = ", ".join(matches)
        raise UnsupportedWheel(f"multiple .dist-info directories found: {joined}")

    info_dir = matches[0]

    # Compare canonicalized forms so naming variants (dashes/underscores,
    # case) of the same project still match.
    canonical_info = canonicalize_name(info_dir)
    canonical_name = canonicalize_name(name)
    if not canonical_info.startswith(canonical_name):
        raise UnsupportedWheel(
            f".dist-info directory {info_dir!r} does not start with {canonical_name!r}"
        )

    return info_dir
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def read_wheel_metadata_file(source: ZipFile, path: str) -> bytes:
    """Read one metadata file out of the wheel zip and return its raw bytes.

    Raises UnsupportedWheel if the entry is missing or unreadable.
    """
    # BadZipFile for general corruption, KeyError for missing entry,
    # and RuntimeError for password-protected files
    try:
        contents = source.read(path)
    except (BadZipFile, KeyError, RuntimeError) as e:
        raise UnsupportedWheel(f"could not read {path!r} file: {e!r}")
    return contents
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def wheel_metadata(source: ZipFile, dist_info_dir: str) -> Message:
    """Return the WHEEL metadata of an extracted wheel, if possible.
    Otherwise, raise UnsupportedWheel.
    """
    # Zip file path separators must be /
    path = f"{dist_info_dir}/WHEEL"
    raw_contents = read_wheel_metadata_file(source, path)

    try:
        decoded = raw_contents.decode()
    except UnicodeDecodeError as e:
        raise UnsupportedWheel(f"error decoding {path!r}: {e!r}")

    # FeedParser (used by Parser) does not raise any exceptions. The returned
    # message may have .defects populated, but for backwards-compatibility we
    # currently ignore them.
    return Parser().parsestr(decoded)
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def wheel_version(wheel_data: Message) -> Tuple[int, ...]:
    """Given WHEEL metadata, return the parsed Wheel-Version.
    Otherwise, raise UnsupportedWheel.
    """
    version_text = wheel_data["Wheel-Version"]
    if version_text is None:
        raise UnsupportedWheel("WHEEL is missing Wheel-Version")

    stripped = version_text.strip()

    # A valid value is dotted integers, e.g. "1.0".
    try:
        parts = [int(piece) for piece in stripped.split(".")]
    except ValueError:
        raise UnsupportedWheel(f"invalid Wheel-Version: {stripped!r}")
    return tuple(parts)
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def check_compatibility(version: Tuple[int, ...], name: str) -> None:
    """Raises errors or warns if called with an incompatible Wheel-Version.

    pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when
    installing a version only minor version ahead (e.g 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    dotted = ".".join(map(str, version))

    # Major-version bump: hard failure.
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            f"{name}'s Wheel-Version ({dotted}) is not compatible with this version "
            "of pip"
        )
    # Minor-version bump only: install, but warn.
    if version > VERSION_COMPATIBLE:
        logger.warning("Installing from a newer Wheel-Version (%s)", dotted)
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/aiosignal/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.37 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/async_timeout/__init__.py
ADDED
|
@@ -0,0 +1,276 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import enum
|
| 3 |
+
import sys
|
| 4 |
+
from types import TracebackType
|
| 5 |
+
from typing import Optional, Type, final
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
__version__ = "5.0.1"
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
__all__ = ("timeout", "timeout_at", "Timeout")
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def timeout(delay: Optional[float]) -> "Timeout":
    """timeout context manager.

    Useful in cases when you want to apply timeout logic around block
    of code or in cases when asyncio.wait_for is not suitable. For example:

    >>> async with timeout(0.001):
    ...     async with aiohttp.get('https://github.com') as r:
    ...         await r.text()


    delay - value in seconds or None to disable timeout logic
    """
    loop = asyncio.get_running_loop()
    # None means "no deadline"; otherwise schedule relative to the
    # loop's monotonic clock.
    deadline: Optional[float] = None if delay is None else loop.time() + delay
    return Timeout(deadline, loop)
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def timeout_at(deadline: Optional[float]) -> "Timeout":
    """Schedule the timeout at absolute time.

    deadline argument points on the time in the same clock system
    as loop.time().

    Please note: it is not POSIX time but a time with
    undefined starting base, e.g. the time of the system power on.

    >>> async with timeout_at(loop.time() + 10):
    ...     async with aiohttp.get('https://github.com') as r:
    ...         await r.text()


    """
    running_loop = asyncio.get_running_loop()
    return Timeout(deadline, running_loop)
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class _State(enum.Enum):
|
| 55 |
+
INIT = "INIT"
|
| 56 |
+
ENTER = "ENTER"
|
| 57 |
+
TIMEOUT = "TIMEOUT"
|
| 58 |
+
EXIT = "EXIT"
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
if sys.version_info >= (3, 11):
    # On 3.11+ asyncio ships its own Timeout; wrap it and add the legacy
    # async_timeout accessors on top.

    class _Expired:
        # Bool-like wrapper so `expired` works both as an attribute
        # (async_timeout style) and as a call (asyncio style).
        __slots__ = ("_val",)

        def __init__(self, val: bool) -> None:
            self._val = val

        def __call__(self) -> bool:
            # asyncio style: timeout.expired()
            return self._val

        def __bool__(self) -> bool:
            # async_timeout style: if timeout.expired: ...
            return self._val

        def __repr__(self) -> str:
            return repr(self._val)

        def __str__(self) -> str:
            return str(self._val)

    @final
    class Timeout(asyncio.Timeout):  # type: ignore[misc]
        # Supports full asyncio.Timeout API.
        # Also provides several asyncio_timeout specific methods
        # for backward compatibility.
        def __init__(
            self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
        ) -> None:
            # `loop` is accepted for signature compatibility with the
            # pre-3.11 branch but not forwarded; asyncio.Timeout uses
            # the running loop.
            super().__init__(deadline)

        @property
        def expired(self) -> _Expired:
            # a hacky property that can provide both roles:
            # timeout.expired() from asyncio
            # timeout.expired from asyncio_timeout
            return _Expired(super().expired())

        @property
        def deadline(self) -> Optional[float]:
            return self.when()

        def reject(self) -> None:
            """Reject scheduled timeout if any."""
            # cancel is maybe better name but
            # task.cancel() raises CancelledError in asyncio world.
            self.reschedule(None)

        def shift(self, delay: float) -> None:
            """Advance timeout on delay seconds.

            The delay can be negative.

            Raise RuntimeError if shift is called when deadline is not scheduled
            """
            deadline = self.when()
            if deadline is None:
                raise RuntimeError("cannot shift timeout if deadline is not scheduled")
            self.reschedule(deadline + delay)

        def update(self, deadline: float) -> None:
            """Set deadline to absolute value.

            deadline argument points on the time in the same clock system
            as loop.time().

            If new deadline is in the past the timeout is raised immediately.

            Please note: it is not POSIX time but a time with
            undefined starting base, e.g. the time of the system power on.
            """
            self.reschedule(deadline)

else:

    @final
    class Timeout:
        # Internal class, please don't instantiate it directly
        # Use timeout() and timeout_at() public factories instead.
        #
        # Implementation note: `async with timeout()` is preferred
        # over `with timeout()`.
        # While technically the Timeout class implementation
        # doesn't need to be async at all,
        # the `async with` statement explicitly points that
        # the context manager should be used from async function context.
        #
        # This design allows to avoid many silly misusages.
        #
        # TimeoutError is raised immediately when scheduled
        # if the deadline is passed.
        # The purpose is to time out as soon as possible
        # without waiting for the next await expression.

        __slots__ = ("_deadline", "_loop", "_state", "_timeout_handler", "_task")

        def __init__(
            self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
        ) -> None:
            self._loop = loop
            self._state = _State.INIT

            self._task: Optional["asyncio.Task[object]"] = None
            self._timeout_handler = None  # type: Optional[asyncio.Handle]
            if deadline is None:
                self._deadline = None  # type: Optional[float]
            else:
                # In INIT state update() only records the deadline;
                # scheduling happens on __aenter__.
                self.update(deadline)

        async def __aenter__(self) -> "Timeout":
            self._do_enter()
            return self

        async def __aexit__(
            self,
            exc_type: Optional[Type[BaseException]],
            exc_val: Optional[BaseException],
            exc_tb: Optional[TracebackType],
        ) -> Optional[bool]:
            self._do_exit(exc_type)
            return None

        @property
        def expired(self) -> bool:
            """Is timeout expired during execution?"""
            return self._state == _State.TIMEOUT

        @property
        def deadline(self) -> Optional[float]:
            return self._deadline

        def reject(self) -> None:
            """Reject scheduled timeout if any."""
            # cancel is maybe better name but
            # task.cancel() raises CancelledError in asyncio world.
            if self._state not in (_State.INIT, _State.ENTER):
                raise RuntimeError(f"invalid state {self._state.value}")
            self._reject()

        def _reject(self) -> None:
            # Drop the task reference and cancel any pending callback.
            self._task = None
            if self._timeout_handler is not None:
                self._timeout_handler.cancel()
                self._timeout_handler = None

        def shift(self, delay: float) -> None:
            """Advance timeout on delay seconds.

            The delay can be negative.

            Raise RuntimeError if shift is called when deadline is not scheduled
            """
            deadline = self._deadline
            if deadline is None:
                raise RuntimeError("cannot shift timeout if deadline is not scheduled")
            self.update(deadline + delay)

        def update(self, deadline: float) -> None:
            """Set deadline to absolute value.

            deadline argument points on the time in the same clock system
            as loop.time().

            If new deadline is in the past the timeout is raised immediately.

            Please note: it is not POSIX time but a time with
            undefined starting base, e.g. the time of the system power on.
            """
            if self._state == _State.EXIT:
                raise RuntimeError("cannot reschedule after exit from context manager")
            if self._state == _State.TIMEOUT:
                raise RuntimeError("cannot reschedule expired timeout")
            if self._timeout_handler is not None:
                self._timeout_handler.cancel()
            self._deadline = deadline
            # Only reschedule once entered; before that the deadline is
            # just recorded.
            if self._state != _State.INIT:
                self._reschedule()

        def _reschedule(self) -> None:
            assert self._state == _State.ENTER
            deadline = self._deadline
            if deadline is None:
                return

            now = self._loop.time()
            if self._timeout_handler is not None:
                self._timeout_handler.cancel()

            # Remember the current task so _on_timeout can cancel it.
            self._task = asyncio.current_task()
            if deadline <= now:
                # Deadline already in the past: fire on the next loop
                # iteration rather than waiting for an await point.
                self._timeout_handler = self._loop.call_soon(self._on_timeout)
            else:
                self._timeout_handler = self._loop.call_at(deadline, self._on_timeout)

        def _do_enter(self) -> None:
            if self._state != _State.INIT:
                raise RuntimeError(f"invalid state {self._state.value}")
            self._state = _State.ENTER
            self._reschedule()

        def _do_exit(self, exc_type: Optional[Type[BaseException]]) -> None:
            # Translate the CancelledError injected by our own timeout
            # into TimeoutError for the caller.
            if exc_type is asyncio.CancelledError and self._state == _State.TIMEOUT:
                assert self._task is not None
                self._timeout_handler = None
                self._task = None
                raise asyncio.TimeoutError
            # timeout has not expired
            self._state = _State.EXIT
            self._reject()
            return None

        def _on_timeout(self) -> None:
            assert self._task is not None
            self._task.cancel()
            self._state = _State.TIMEOUT
            # drop the reference early
            self._timeout_handler = None
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/async_timeout/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (7.66 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/async_timeout/py.typed
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
Placeholder
|
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/multidict-6.1.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/multidict-6.1.0.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright 2016 Andrew Svetlov and aio-libs contributors
|
| 2 |
+
|
| 3 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 4 |
+
you may not use this file except in compliance with the License.
|
| 5 |
+
You may obtain a copy of the License at
|
| 6 |
+
|
| 7 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 8 |
+
|
| 9 |
+
Unless required by applicable law or agreed to in writing, software
|
| 10 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 11 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 12 |
+
See the License for the specific language governing permissions and
|
| 13 |
+
limitations under the License.
|
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/multidict-6.1.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: multidict
|
| 3 |
+
Version: 6.1.0
|
| 4 |
+
Summary: multidict implementation
|
| 5 |
+
Home-page: https://github.com/aio-libs/multidict
|
| 6 |
+
Author: Andrew Svetlov
|
| 7 |
+
Author-email: andrew.svetlov@gmail.com
|
| 8 |
+
License: Apache 2
|
| 9 |
+
Project-URL: Chat: Matrix, https://matrix.to/#/#aio-libs:matrix.org
|
| 10 |
+
Project-URL: Chat: Matrix Space, https://matrix.to/#/#aio-libs-space:matrix.org
|
| 11 |
+
Project-URL: CI: GitHub, https://github.com/aio-libs/multidict/actions
|
| 12 |
+
Project-URL: Code of Conduct, https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md
|
| 13 |
+
Project-URL: Coverage: codecov, https://codecov.io/github/aio-libs/multidict
|
| 14 |
+
Project-URL: Docs: Changelog, https://multidict.aio-libs.org/en/latest/changes/
|
| 15 |
+
Project-URL: Docs: RTD, https://multidict.aio-libs.org
|
| 16 |
+
Project-URL: GitHub: issues, https://github.com/aio-libs/multidict/issues
|
| 17 |
+
Project-URL: GitHub: repo, https://github.com/aio-libs/multidict
|
| 18 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 19 |
+
Classifier: Intended Audience :: Developers
|
| 20 |
+
Classifier: License :: OSI Approved :: Apache Software License
|
| 21 |
+
Classifier: Programming Language :: Python
|
| 22 |
+
Classifier: Programming Language :: Python :: 3
|
| 23 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 24 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 25 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 26 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 27 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 28 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 29 |
+
Requires-Python: >=3.8
|
| 30 |
+
Description-Content-Type: text/x-rst
|
| 31 |
+
License-File: LICENSE
|
| 32 |
+
Requires-Dist: typing-extensions >=4.1.0 ; python_version < "3.11"
|
| 33 |
+
|
| 34 |
+
=========
|
| 35 |
+
multidict
|
| 36 |
+
=========
|
| 37 |
+
|
| 38 |
+
.. image:: https://github.com/aio-libs/multidict/actions/workflows/ci-cd.yml/badge.svg
|
| 39 |
+
:target: https://github.com/aio-libs/multidict/actions
|
| 40 |
+
:alt: GitHub status for master branch
|
| 41 |
+
|
| 42 |
+
.. image:: https://codecov.io/gh/aio-libs/multidict/branch/master/graph/badge.svg
|
| 43 |
+
:target: https://codecov.io/gh/aio-libs/multidict
|
| 44 |
+
:alt: Coverage metrics
|
| 45 |
+
|
| 46 |
+
.. image:: https://img.shields.io/pypi/v/multidict.svg
|
| 47 |
+
:target: https://pypi.org/project/multidict
|
| 48 |
+
:alt: PyPI
|
| 49 |
+
|
| 50 |
+
.. image:: https://readthedocs.org/projects/multidict/badge/?version=latest
|
| 51 |
+
:target: https://multidict.aio-libs.org
|
| 52 |
+
:alt: Read The Docs build status badge
|
| 53 |
+
|
| 54 |
+
.. image:: https://img.shields.io/pypi/pyversions/multidict.svg
|
| 55 |
+
:target: https://pypi.org/project/multidict
|
| 56 |
+
:alt: Python versions
|
| 57 |
+
|
| 58 |
+
.. image:: https://img.shields.io/matrix/aio-libs:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
|
| 59 |
+
:target: https://matrix.to/#/%23aio-libs:matrix.org
|
| 60 |
+
:alt: Matrix Room — #aio-libs:matrix.org
|
| 61 |
+
|
| 62 |
+
.. image:: https://img.shields.io/matrix/aio-libs-space:matrix.org?label=Discuss%20on%20Matrix%20at%20%23aio-libs-space%3Amatrix.org&logo=matrix&server_fqdn=matrix.org&style=flat
|
| 63 |
+
:target: https://matrix.to/#/%23aio-libs-space:matrix.org
|
| 64 |
+
:alt: Matrix Space — #aio-libs-space:matrix.org
|
| 65 |
+
|
| 66 |
+
Multidict is a dict-like collection of *key-value pairs* where a key
|
| 67 |
+
might occur more than once in the container.
|
| 68 |
+
|
| 69 |
+
Introduction
|
| 70 |
+
------------
|
| 71 |
+
|
| 72 |
+
*HTTP Headers* and *URL query string* require specific data structure:
|
| 73 |
+
*multidict*. It behaves mostly like a regular ``dict`` but it may have
|
| 74 |
+
several *values* for the same *key* and *preserves insertion ordering*.
|
| 75 |
+
|
| 76 |
+
The *key* is ``str`` (or ``istr`` for case-insensitive dictionaries).
|
| 77 |
+
|
| 78 |
+
``multidict`` has four multidict classes:
|
| 79 |
+
``MultiDict``, ``MultiDictProxy``, ``CIMultiDict``
|
| 80 |
+
and ``CIMultiDictProxy``.
|
| 81 |
+
|
| 82 |
+
Immutable proxies (``MultiDictProxy`` and
|
| 83 |
+
``CIMultiDictProxy``) provide a dynamic view for the
|
| 84 |
+
proxied multidict, the view reflects underlying collection changes. They
|
| 85 |
+
implement the ``collections.abc.Mapping`` interface.
|
| 86 |
+
|
| 87 |
+
Regular mutable (``MultiDict`` and ``CIMultiDict``) classes
|
| 88 |
+
implement ``collections.abc.MutableMapping``, allowing them to change
|
| 89 |
+
their own content.
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
*Case insensitive* (``CIMultiDict`` and
|
| 93 |
+
``CIMultiDictProxy``) assume the *keys* are case
|
| 94 |
+
insensitive, e.g.::
|
| 95 |
+
|
| 96 |
+
>>> dct = CIMultiDict(key='val')
|
| 97 |
+
>>> 'Key' in dct
|
| 98 |
+
True
|
| 99 |
+
>>> dct['Key']
|
| 100 |
+
'val'
|
| 101 |
+
|
| 102 |
+
*Keys* should be ``str`` or ``istr`` instances.
|
| 103 |
+
|
| 104 |
+
The library has optional C Extensions for speed.
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
License
|
| 108 |
+
-------
|
| 109 |
+
|
| 110 |
+
Apache 2
|
| 111 |
+
|
| 112 |
+
Library Installation
|
| 113 |
+
--------------------
|
| 114 |
+
|
| 115 |
+
.. code-block:: bash
|
| 116 |
+
|
| 117 |
+
$ pip install multidict
|
| 118 |
+
|
| 119 |
+
The library is Python 3 only!
|
| 120 |
+
|
| 121 |
+
PyPI contains binary wheels for Linux, Windows and MacOS. If you want to install
|
| 122 |
+
``multidict`` on another operating system (or *Alpine Linux* inside a Docker) the
|
| 123 |
+
tarball will be used to compile the library from source. It requires a C compiler and
|
| 124 |
+
Python headers to be installed.
|
| 125 |
+
|
| 126 |
+
To skip the compilation, please use the `MULTIDICT_NO_EXTENSIONS` environment variable,
|
| 127 |
+
e.g.:
|
| 128 |
+
|
| 129 |
+
.. code-block:: bash
|
| 130 |
+
|
| 131 |
+
$ MULTIDICT_NO_EXTENSIONS=1 pip install multidict
|
| 132 |
+
|
| 133 |
+
Please note, the pure Python (uncompiled) version is about 20-50 times slower depending on
|
| 134 |
+
the usage scenario!!!
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
Changelog
|
| 139 |
+
---------
|
| 140 |
+
See `RTD page <http://multidict.aio-libs.org/en/latest/changes>`_.
|
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/multidict-6.1.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: setuptools (74.1.2)
|
| 3 |
+
Root-Is-Purelib: false
|
| 4 |
+
Tag: cp310-cp310-manylinux_2_17_x86_64
|
| 5 |
+
Tag: cp310-cp310-manylinux2014_x86_64
|
| 6 |
+
|
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/multidict-6.1.0.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
multidict
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_convolution_mode_ops.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once

// @generated by torchgen/gen.py from Operator.h

#include <tuple>
#include <vector>

// Forward declarations of any types needed in the operator signatures.
// We can't directly include these classes because it will cause circular include dependencies.
// This file is included by TensorBody.h, which defines the Tensor class.
#include <ATen/core/ATen_fwd.h>

namespace at {
namespace _ops {


// Dispatcher record for aten::_convolution_mode (convolution with a
// string `padding` argument per the schema below). `call` dispatches
// normally; `redispatch` continues with an explicit DispatchKeySet.
struct TORCH_API _convolution_mode {
  using schema = at::Tensor (const at::Tensor &, const at::Tensor &, const ::std::optional<at::Tensor> &, c10::SymIntArrayRef, c10::string_view, c10::SymIntArrayRef, c10::SymInt);
  using ptr_schema = schema*;
  // See Note [static constexpr char* members for windows NVCC]
  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_convolution_mode")
  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
  STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_convolution_mode(Tensor input, Tensor weight, Tensor? bias, SymInt[] stride, str padding, SymInt[] dilation, SymInt groups) -> Tensor")
  static at::Tensor call(const at::Tensor & input, const at::Tensor & weight, const ::std::optional<at::Tensor> & bias, c10::SymIntArrayRef stride, c10::string_view padding, c10::SymIntArrayRef dilation, c10::SymInt groups);
  static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & input, const at::Tensor & weight, const ::std::optional<at::Tensor> & bias, c10::SymIntArrayRef stride, c10::string_view padding, c10::SymIntArrayRef dilation, c10::SymInt groups);
};

}} // namespace at::_ops
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_ctc_loss.h
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once

// @generated by torchgen/gen.py from Function.h

#include <ATen/Context.h>
#include <ATen/DeviceGuard.h>
#include <ATen/TensorUtils.h>
#include <ATen/TracerMode.h>
#include <ATen/core/Generator.h>
#include <ATen/core/Reduction.h>
#include <ATen/core/Tensor.h>
#include <c10/core/Scalar.h>
#include <c10/core/Storage.h>
#include <c10/core/TensorOptions.h>
#include <c10/util/Deprecated.h>
#include <c10/util/Optional.h>



#include <ATen/ops/_ctc_loss_ops.h>

namespace at {

// Inline wrappers forwarding each aten::_ctc_loss overload (IntArrayRef
// vs Tensor lengths, with and without `out` arguments) to the dispatcher
// entries declared in _ctc_loss_ops.h. The schema comment above each
// wrapper is the authoritative signature.

// aten::_ctc_loss(Tensor log_probs, Tensor targets, int[] input_lengths, int[] target_lengths, int blank=0, bool zero_infinity=False) -> (Tensor, Tensor)
inline ::std::tuple<at::Tensor,at::Tensor> _ctc_loss(const at::Tensor & log_probs, const at::Tensor & targets, at::IntArrayRef input_lengths, at::IntArrayRef target_lengths, int64_t blank=0, bool zero_infinity=false) {
    return at::_ops::_ctc_loss::call(log_probs, targets, input_lengths, target_lengths, blank, zero_infinity);
}

// aten::_ctc_loss.Tensor(Tensor log_probs, Tensor targets, Tensor input_lengths, Tensor target_lengths, int blank=0, bool zero_infinity=False) -> (Tensor, Tensor)
inline ::std::tuple<at::Tensor,at::Tensor> _ctc_loss(const at::Tensor & log_probs, const at::Tensor & targets, const at::Tensor & input_lengths, const at::Tensor & target_lengths, int64_t blank=0, bool zero_infinity=false) {
    return at::_ops::_ctc_loss_Tensor::call(log_probs, targets, input_lengths, target_lengths, blank, zero_infinity);
}

// aten::_ctc_loss.out(Tensor log_probs, Tensor targets, int[] input_lengths, int[] target_lengths, int blank=0, bool zero_infinity=False, *, Tensor(a!) out0, Tensor(b!) out1) -> (Tensor(a!), Tensor(b!))
inline ::std::tuple<at::Tensor &,at::Tensor &> _ctc_loss_out(at::Tensor & out0, at::Tensor & out1, const at::Tensor & log_probs, const at::Tensor & targets, at::IntArrayRef input_lengths, at::IntArrayRef target_lengths, int64_t blank=0, bool zero_infinity=false) {
    return at::_ops::_ctc_loss_out::call(log_probs, targets, input_lengths, target_lengths, blank, zero_infinity, out0, out1);
}
// aten::_ctc_loss.out(Tensor log_probs, Tensor targets, int[] input_lengths, int[] target_lengths, int blank=0, bool zero_infinity=False, *, Tensor(a!) out0, Tensor(b!) out1) -> (Tensor(a!), Tensor(b!))
inline ::std::tuple<at::Tensor &,at::Tensor &> _ctc_loss_outf(const at::Tensor & log_probs, const at::Tensor & targets, at::IntArrayRef input_lengths, at::IntArrayRef target_lengths, int64_t blank, bool zero_infinity, at::Tensor & out0, at::Tensor & out1) {
    return at::_ops::_ctc_loss_out::call(log_probs, targets, input_lengths, target_lengths, blank, zero_infinity, out0, out1);
}

// aten::_ctc_loss.Tensor_out(Tensor log_probs, Tensor targets, Tensor input_lengths, Tensor target_lengths, int blank=0, bool zero_infinity=False, *, Tensor(a!) out0, Tensor(b!) out1) -> (Tensor(a!), Tensor(b!))
inline ::std::tuple<at::Tensor &,at::Tensor &> _ctc_loss_out(at::Tensor & out0, at::Tensor & out1, const at::Tensor & log_probs, const at::Tensor & targets, const at::Tensor & input_lengths, const at::Tensor & target_lengths, int64_t blank=0, bool zero_infinity=false) {
    return at::_ops::_ctc_loss_Tensor_out::call(log_probs, targets, input_lengths, target_lengths, blank, zero_infinity, out0, out1);
}
// aten::_ctc_loss.Tensor_out(Tensor log_probs, Tensor targets, Tensor input_lengths, Tensor target_lengths, int blank=0, bool zero_infinity=False, *, Tensor(a!) out0, Tensor(b!) out1) -> (Tensor(a!), Tensor(b!))
inline ::std::tuple<at::Tensor &,at::Tensor &> _ctc_loss_outf(const at::Tensor & log_probs, const at::Tensor & targets, const at::Tensor & input_lengths, const at::Tensor & target_lengths, int64_t blank, bool zero_infinity, at::Tensor & out0, at::Tensor & out1) {
    return at::_ops::_ctc_loss_Tensor_out::call(log_probs, targets, input_lengths, target_lengths, blank, zero_infinity, out0, out1);
}

}
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_empty_affine_quantized.h
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_empty_affine_quantized_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_empty_affine_quantized(SymInt[] size, *, ScalarType? dtype=None, Layout? layout=None, Device? device=None, bool? pin_memory=None, float scale=1, int zero_point=0, MemoryFormat? memory_format=contiguous_format) -> Tensor
|
| 26 |
+
inline at::Tensor _empty_affine_quantized(at::IntArrayRef size, at::TensorOptions options={}, double scale=1, int64_t zero_point=0, ::std::optional<at::MemoryFormat> memory_format=MemoryFormat::Contiguous) {
|
| 27 |
+
return at::_ops::_empty_affine_quantized::call(c10::fromIntArrayRefSlow(size), c10::optTypeMetaToScalarType(options.dtype_opt()), options.layout_opt(), options.device_opt(), options.pinned_memory_opt(), scale, zero_point, c10::impl::check_tensor_options_and_extract_memory_format(options, memory_format));
|
| 28 |
+
}
|
| 29 |
+
namespace symint {
|
| 30 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, int64_t>::value>>
|
| 31 |
+
at::Tensor _empty_affine_quantized(at::IntArrayRef size, at::TensorOptions options={}, double scale=1, int64_t zero_point=0, ::std::optional<at::MemoryFormat> memory_format=MemoryFormat::Contiguous) {
|
| 32 |
+
return at::_ops::_empty_affine_quantized::call(c10::fromIntArrayRefSlow(size), c10::optTypeMetaToScalarType(options.dtype_opt()), options.layout_opt(), options.device_opt(), options.pinned_memory_opt(), scale, zero_point, c10::impl::check_tensor_options_and_extract_memory_format(options, memory_format));
|
| 33 |
+
}
|
| 34 |
+
}
|
| 35 |
+
|
| 36 |
+
// aten::_empty_affine_quantized(SymInt[] size, *, ScalarType? dtype=None, Layout? layout=None, Device? device=None, bool? pin_memory=None, float scale=1, int zero_point=0, MemoryFormat? memory_format=contiguous_format) -> Tensor
|
| 37 |
+
inline at::Tensor _empty_affine_quantized(at::IntArrayRef size, ::std::optional<at::ScalarType> dtype, ::std::optional<at::Layout> layout, ::std::optional<at::Device> device, ::std::optional<bool> pin_memory, double scale, int64_t zero_point, ::std::optional<at::MemoryFormat> memory_format) {
|
| 38 |
+
return at::_ops::_empty_affine_quantized::call(c10::fromIntArrayRefSlow(size), dtype, layout, device, pin_memory, scale, zero_point, memory_format);
|
| 39 |
+
}
|
| 40 |
+
namespace symint {
|
| 41 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, int64_t>::value>>
|
| 42 |
+
at::Tensor _empty_affine_quantized(at::IntArrayRef size, ::std::optional<at::ScalarType> dtype, ::std::optional<at::Layout> layout, ::std::optional<at::Device> device, ::std::optional<bool> pin_memory, double scale, int64_t zero_point, ::std::optional<at::MemoryFormat> memory_format) {
|
| 43 |
+
return at::_ops::_empty_affine_quantized::call(c10::fromIntArrayRefSlow(size), dtype, layout, device, pin_memory, scale, zero_point, memory_format);
|
| 44 |
+
}
|
| 45 |
+
}
|
| 46 |
+
|
| 47 |
+
// aten::_empty_affine_quantized(SymInt[] size, *, ScalarType? dtype=None, Layout? layout=None, Device? device=None, bool? pin_memory=None, float scale=1, int zero_point=0, MemoryFormat? memory_format=contiguous_format) -> Tensor
|
| 48 |
+
inline at::Tensor _empty_affine_quantized_symint(c10::SymIntArrayRef size, at::TensorOptions options={}, double scale=1, int64_t zero_point=0, ::std::optional<at::MemoryFormat> memory_format=MemoryFormat::Contiguous) {
|
| 49 |
+
return at::_ops::_empty_affine_quantized::call(size, c10::optTypeMetaToScalarType(options.dtype_opt()), options.layout_opt(), options.device_opt(), options.pinned_memory_opt(), scale, zero_point, c10::impl::check_tensor_options_and_extract_memory_format(options, memory_format));
|
| 50 |
+
}
|
| 51 |
+
namespace symint {
|
| 52 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, c10::SymInt>::value>>
|
| 53 |
+
at::Tensor _empty_affine_quantized(c10::SymIntArrayRef size, at::TensorOptions options={}, double scale=1, int64_t zero_point=0, ::std::optional<at::MemoryFormat> memory_format=MemoryFormat::Contiguous) {
|
| 54 |
+
return at::_ops::_empty_affine_quantized::call(size, c10::optTypeMetaToScalarType(options.dtype_opt()), options.layout_opt(), options.device_opt(), options.pinned_memory_opt(), scale, zero_point, c10::impl::check_tensor_options_and_extract_memory_format(options, memory_format));
|
| 55 |
+
}
|
| 56 |
+
}
|
| 57 |
+
|
| 58 |
+
// aten::_empty_affine_quantized(SymInt[] size, *, ScalarType? dtype=None, Layout? layout=None, Device? device=None, bool? pin_memory=None, float scale=1, int zero_point=0, MemoryFormat? memory_format=contiguous_format) -> Tensor
|
| 59 |
+
inline at::Tensor _empty_affine_quantized_symint(c10::SymIntArrayRef size, ::std::optional<at::ScalarType> dtype, ::std::optional<at::Layout> layout, ::std::optional<at::Device> device, ::std::optional<bool> pin_memory, double scale, int64_t zero_point, ::std::optional<at::MemoryFormat> memory_format) {
|
| 60 |
+
return at::_ops::_empty_affine_quantized::call(size, dtype, layout, device, pin_memory, scale, zero_point, memory_format);
|
| 61 |
+
}
|
| 62 |
+
namespace symint {
|
| 63 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, c10::SymInt>::value>>
|
| 64 |
+
at::Tensor _empty_affine_quantized(c10::SymIntArrayRef size, ::std::optional<at::ScalarType> dtype, ::std::optional<at::Layout> layout, ::std::optional<at::Device> device, ::std::optional<bool> pin_memory, double scale, int64_t zero_point, ::std::optional<at::MemoryFormat> memory_format) {
|
| 65 |
+
return at::_ops::_empty_affine_quantized::call(size, dtype, layout, device, pin_memory, scale, zero_point, memory_format);
|
| 66 |
+
}
|
| 67 |
+
}
|
| 68 |
+
|
| 69 |
+
// aten::_empty_affine_quantized.out(SymInt[] size, *, float scale=1, int zero_point=0, MemoryFormat? memory_format=contiguous_format, Tensor(a!) out) -> Tensor(a!)
|
| 70 |
+
inline at::Tensor & _empty_affine_quantized_out(at::Tensor & out, at::IntArrayRef size, double scale=1, int64_t zero_point=0, ::std::optional<at::MemoryFormat> memory_format=MemoryFormat::Contiguous) {
|
| 71 |
+
return at::_ops::_empty_affine_quantized_out::call(c10::fromIntArrayRefSlow(size), scale, zero_point, memory_format, out);
|
| 72 |
+
}
|
| 73 |
+
namespace symint {
|
| 74 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, int64_t>::value>>
|
| 75 |
+
at::Tensor & _empty_affine_quantized_out(at::Tensor & out, at::IntArrayRef size, double scale=1, int64_t zero_point=0, ::std::optional<at::MemoryFormat> memory_format=MemoryFormat::Contiguous) {
|
| 76 |
+
return at::_ops::_empty_affine_quantized_out::call(c10::fromIntArrayRefSlow(size), scale, zero_point, memory_format, out);
|
| 77 |
+
}
|
| 78 |
+
}
|
| 79 |
+
|
| 80 |
+
// aten::_empty_affine_quantized.out(SymInt[] size, *, float scale=1, int zero_point=0, MemoryFormat? memory_format=contiguous_format, Tensor(a!) out) -> Tensor(a!)
|
| 81 |
+
inline at::Tensor & _empty_affine_quantized_outf(at::IntArrayRef size, double scale, int64_t zero_point, ::std::optional<at::MemoryFormat> memory_format, at::Tensor & out) {
|
| 82 |
+
return at::_ops::_empty_affine_quantized_out::call(c10::fromIntArrayRefSlow(size), scale, zero_point, memory_format, out);
|
| 83 |
+
}
|
| 84 |
+
namespace symint {
|
| 85 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, int64_t>::value>>
|
| 86 |
+
at::Tensor & _empty_affine_quantized_outf(at::IntArrayRef size, double scale, int64_t zero_point, ::std::optional<at::MemoryFormat> memory_format, at::Tensor & out) {
|
| 87 |
+
return at::_ops::_empty_affine_quantized_out::call(c10::fromIntArrayRefSlow(size), scale, zero_point, memory_format, out);
|
| 88 |
+
}
|
| 89 |
+
}
|
| 90 |
+
|
| 91 |
+
// aten::_empty_affine_quantized.out(SymInt[] size, *, float scale=1, int zero_point=0, MemoryFormat? memory_format=contiguous_format, Tensor(a!) out) -> Tensor(a!)
|
| 92 |
+
inline at::Tensor & _empty_affine_quantized_symint_out(at::Tensor & out, c10::SymIntArrayRef size, double scale=1, int64_t zero_point=0, ::std::optional<at::MemoryFormat> memory_format=MemoryFormat::Contiguous) {
|
| 93 |
+
return at::_ops::_empty_affine_quantized_out::call(size, scale, zero_point, memory_format, out);
|
| 94 |
+
}
|
| 95 |
+
namespace symint {
|
| 96 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, c10::SymInt>::value>>
|
| 97 |
+
at::Tensor & _empty_affine_quantized_out(at::Tensor & out, c10::SymIntArrayRef size, double scale=1, int64_t zero_point=0, ::std::optional<at::MemoryFormat> memory_format=MemoryFormat::Contiguous) {
|
| 98 |
+
return at::_ops::_empty_affine_quantized_out::call(size, scale, zero_point, memory_format, out);
|
| 99 |
+
}
|
| 100 |
+
}
|
| 101 |
+
|
| 102 |
+
// aten::_empty_affine_quantized.out(SymInt[] size, *, float scale=1, int zero_point=0, MemoryFormat? memory_format=contiguous_format, Tensor(a!) out) -> Tensor(a!)
|
| 103 |
+
inline at::Tensor & _empty_affine_quantized_symint_outf(c10::SymIntArrayRef size, double scale, int64_t zero_point, ::std::optional<at::MemoryFormat> memory_format, at::Tensor & out) {
|
| 104 |
+
return at::_ops::_empty_affine_quantized_out::call(size, scale, zero_point, memory_format, out);
|
| 105 |
+
}
|
| 106 |
+
namespace symint {
|
| 107 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, c10::SymInt>::value>>
|
| 108 |
+
at::Tensor & _empty_affine_quantized_outf(c10::SymIntArrayRef size, double scale, int64_t zero_point, ::std::optional<at::MemoryFormat> memory_format, at::Tensor & out) {
|
| 109 |
+
return at::_ops::_empty_affine_quantized_out::call(size, scale, zero_point, memory_format, out);
|
| 110 |
+
}
|
| 111 |
+
}
|
| 112 |
+
|
| 113 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_addcmul_compositeexplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API ::std::vector<at::Tensor> _foreach_addcmul(at::TensorList self, at::TensorList tensor1, at::TensorList tensor2, const at::Scalar & value=1);
|
| 21 |
+
TORCH_API void _foreach_addcmul_out(at::TensorList out, at::TensorList self, at::TensorList tensor1, at::TensorList tensor2, const at::Scalar & value=1);
|
| 22 |
+
TORCH_API void _foreach_addcmul_outf(at::TensorList self, at::TensorList tensor1, at::TensorList tensor2, const at::Scalar & value, at::TensorList out);
|
| 23 |
+
TORCH_API void _foreach_addcmul_(at::TensorList self, at::TensorList tensor1, at::TensorList tensor2, const at::Scalar & value=1);
|
| 24 |
+
TORCH_API ::std::vector<at::Tensor> _foreach_addcmul(at::TensorList self, at::TensorList tensor1, at::TensorList tensor2, at::ArrayRef<at::Scalar> scalars);
|
| 25 |
+
TORCH_API void _foreach_addcmul_out(at::TensorList out, at::TensorList self, at::TensorList tensor1, at::TensorList tensor2, at::ArrayRef<at::Scalar> scalars);
|
| 26 |
+
TORCH_API void _foreach_addcmul_outf(at::TensorList self, at::TensorList tensor1, at::TensorList tensor2, at::ArrayRef<at::Scalar> scalars, at::TensorList out);
|
| 27 |
+
TORCH_API void _foreach_addcmul_(at::TensorList self, at::TensorList tensor1, at::TensorList tensor2, at::ArrayRef<at::Scalar> scalars);
|
| 28 |
+
TORCH_API ::std::vector<at::Tensor> _foreach_addcmul(at::TensorList self, at::TensorList tensor1, at::TensorList tensor2, const at::Tensor & scalars);
|
| 29 |
+
TORCH_API void _foreach_addcmul_out(at::TensorList out, at::TensorList self, at::TensorList tensor1, at::TensorList tensor2, const at::Tensor & scalars);
|
| 30 |
+
TORCH_API void _foreach_addcmul_outf(at::TensorList self, at::TensorList tensor1, at::TensorList tensor2, const at::Tensor & scalars, at::TensorList out);
|
| 31 |
+
TORCH_API void _foreach_addcmul_(at::TensorList self, at::TensorList tensor1, at::TensorList tensor2, const at::Tensor & scalars);
|
| 32 |
+
|
| 33 |
+
} // namespace compositeexplicitautograd
|
| 34 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_foreach_norm_ops.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _foreach_norm_Scalar {
|
| 18 |
+
using schema = ::std::vector<at::Tensor> (at::TensorList, const at::Scalar &, ::std::optional<at::ScalarType>);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_norm")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_norm.Scalar(Tensor[] self, Scalar ord=2, ScalarType? dtype=None) -> Tensor[]")
|
| 24 |
+
static ::std::vector<at::Tensor> call(at::TensorList self, const at::Scalar & ord, ::std::optional<at::ScalarType> dtype);
|
| 25 |
+
static ::std::vector<at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self, const at::Scalar & ord, ::std::optional<at::ScalarType> dtype);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
struct TORCH_API _foreach_norm_Scalar_out {
|
| 29 |
+
using schema = void (at::TensorList, const at::Scalar &, ::std::optional<at::ScalarType>, at::TensorList);
|
| 30 |
+
using ptr_schema = schema*;
|
| 31 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 32 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_foreach_norm")
|
| 33 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "Scalar_out")
|
| 34 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_foreach_norm.Scalar_out(Tensor[] self, Scalar ord=2, ScalarType? dtype=None, *, Tensor(a!)[] out) -> ()")
|
| 35 |
+
static void call(at::TensorList self, const at::Scalar & ord, ::std::optional<at::ScalarType> dtype, at::TensorList out);
|
| 36 |
+
static void redispatch(c10::DispatchKeySet dispatchKeySet, at::TensorList self, const at::Scalar & ord, ::std::optional<at::ScalarType> dtype, at::TensorList out);
|
| 37 |
+
};
|
| 38 |
+
|
| 39 |
+
}} // namespace at::_ops
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_int_mm_native.h
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor _int_mm_cpu(const at::Tensor & self, const at::Tensor & mat2);
|
| 20 |
+
TORCH_API at::Tensor & _int_mm_out_cpu(const at::Tensor & self, const at::Tensor & mat2, at::Tensor & out);
|
| 21 |
+
TORCH_API at::Tensor _int_mm_cuda(const at::Tensor & self, const at::Tensor & mat2);
|
| 22 |
+
TORCH_API at::Tensor & _int_mm_out_cuda(const at::Tensor & self, const at::Tensor & mat2, at::Tensor & out);
|
| 23 |
+
} // namespace native
|
| 24 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_is_all_true_ops.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _is_all_true {
|
| 18 |
+
using schema = at::Tensor (const at::Tensor &);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_is_all_true")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_is_all_true(Tensor self) -> Tensor")
|
| 24 |
+
static at::Tensor call(const at::Tensor & self);
|
| 25 |
+
static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
}} // namespace at::_ops
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/_sparse_mask_projection.h
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_sparse_mask_projection_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_sparse_mask_projection.out(Tensor self, Tensor mask, bool accumulate_matches=False, *, Tensor(a!) out) -> Tensor(a!)
|
| 26 |
+
inline at::Tensor & _sparse_mask_projection_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & mask, bool accumulate_matches=false) {
|
| 27 |
+
return at::_ops::_sparse_mask_projection_out::call(self, mask, accumulate_matches, out);
|
| 28 |
+
}
|
| 29 |
+
// aten::_sparse_mask_projection.out(Tensor self, Tensor mask, bool accumulate_matches=False, *, Tensor(a!) out) -> Tensor(a!)
|
| 30 |
+
inline at::Tensor & _sparse_mask_projection_outf(const at::Tensor & self, const at::Tensor & mask, bool accumulate_matches, at::Tensor & out) {
|
| 31 |
+
return at::_ops::_sparse_mask_projection_out::call(self, mask, accumulate_matches, out);
|
| 32 |
+
}
|
| 33 |
+
|
| 34 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/amin_native.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
#include <ATen/ops/amin_meta.h>
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
struct TORCH_API structured_amin_out : public at::meta::structured_amin {
|
| 20 |
+
void impl(const at::Tensor & self, at::IntArrayRef dim, bool keepdim, const at::Tensor & out);
|
| 21 |
+
};
|
| 22 |
+
} // namespace native
|
| 23 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/atan2_cuda_dispatch.h
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cuda {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor atan2(const at::Tensor & self, const at::Tensor & other);
|
| 21 |
+
TORCH_API at::Tensor & atan2_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & other);
|
| 22 |
+
TORCH_API at::Tensor & atan2_outf(const at::Tensor & self, const at::Tensor & other, at::Tensor & out);
|
| 23 |
+
TORCH_API at::Tensor & atan2_(at::Tensor & self, const at::Tensor & other);
|
| 24 |
+
|
| 25 |
+
} // namespace cuda
|
| 26 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/col_indices_copy.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/col_indices_copy_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::col_indices_copy(Tensor self) -> Tensor
|
| 26 |
+
inline at::Tensor col_indices_copy(const at::Tensor & self) {
|
| 27 |
+
return at::_ops::col_indices_copy::call(self);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::col_indices_copy.out(Tensor self, *, Tensor(a!) out) -> Tensor(a!)
|
| 31 |
+
inline at::Tensor & col_indices_copy_out(at::Tensor & out, const at::Tensor & self) {
|
| 32 |
+
return at::_ops::col_indices_copy_out::call(self, out);
|
| 33 |
+
}
|
| 34 |
+
// aten::col_indices_copy.out(Tensor self, *, Tensor(a!) out) -> Tensor(a!)
|
| 35 |
+
inline at::Tensor & col_indices_copy_outf(const at::Tensor & self, at::Tensor & out) {
|
| 36 |
+
return at::_ops::col_indices_copy_out::call(self, out);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/combinations_native.h
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor combinations(const at::Tensor & self, int64_t r=2, bool with_replacement=false);
|
| 20 |
+
} // namespace native
|
| 21 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/conv_depthwise3d.h
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/conv_depthwise3d_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::conv_depthwise3d(Tensor self, Tensor weight, SymInt[3] kernel_size, Tensor? bias, SymInt[3] stride, SymInt[3] padding, SymInt[3] dilation) -> Tensor
|
| 26 |
+
inline at::Tensor conv_depthwise3d(const at::Tensor & self, const at::Tensor & weight, at::IntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, at::IntArrayRef stride, at::IntArrayRef padding, at::IntArrayRef dilation) {
|
| 27 |
+
return at::_ops::conv_depthwise3d::call(self, weight, c10::fromIntArrayRefSlow(kernel_size), bias, c10::fromIntArrayRefSlow(stride), c10::fromIntArrayRefSlow(padding), c10::fromIntArrayRefSlow(dilation));
|
| 28 |
+
}
|
| 29 |
+
namespace symint {
|
| 30 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, int64_t>::value>>
|
| 31 |
+
at::Tensor conv_depthwise3d(const at::Tensor & self, const at::Tensor & weight, at::IntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, at::IntArrayRef stride, at::IntArrayRef padding, at::IntArrayRef dilation) {
|
| 32 |
+
return at::_ops::conv_depthwise3d::call(self, weight, c10::fromIntArrayRefSlow(kernel_size), bias, c10::fromIntArrayRefSlow(stride), c10::fromIntArrayRefSlow(padding), c10::fromIntArrayRefSlow(dilation));
|
| 33 |
+
}
|
| 34 |
+
}
|
| 35 |
+
|
| 36 |
+
// aten::conv_depthwise3d(Tensor self, Tensor weight, SymInt[3] kernel_size, Tensor? bias, SymInt[3] stride, SymInt[3] padding, SymInt[3] dilation) -> Tensor
|
| 37 |
+
inline at::Tensor conv_depthwise3d_symint(const at::Tensor & self, const at::Tensor & weight, c10::SymIntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, c10::SymIntArrayRef stride, c10::SymIntArrayRef padding, c10::SymIntArrayRef dilation) {
|
| 38 |
+
return at::_ops::conv_depthwise3d::call(self, weight, kernel_size, bias, stride, padding, dilation);
|
| 39 |
+
}
|
| 40 |
+
namespace symint {
|
| 41 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, c10::SymInt>::value>>
|
| 42 |
+
at::Tensor conv_depthwise3d(const at::Tensor & self, const at::Tensor & weight, c10::SymIntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, c10::SymIntArrayRef stride, c10::SymIntArrayRef padding, c10::SymIntArrayRef dilation) {
|
| 43 |
+
return at::_ops::conv_depthwise3d::call(self, weight, kernel_size, bias, stride, padding, dilation);
|
| 44 |
+
}
|
| 45 |
+
}
|
| 46 |
+
|
| 47 |
+
// aten::conv_depthwise3d.out(Tensor self, Tensor weight, SymInt[3] kernel_size, Tensor? bias, SymInt[3] stride, SymInt[3] padding, SymInt[3] dilation, *, Tensor(a!) out) -> Tensor(a!)
|
| 48 |
+
inline at::Tensor & conv_depthwise3d_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & weight, at::IntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, at::IntArrayRef stride, at::IntArrayRef padding, at::IntArrayRef dilation) {
|
| 49 |
+
return at::_ops::conv_depthwise3d_out::call(self, weight, c10::fromIntArrayRefSlow(kernel_size), bias, c10::fromIntArrayRefSlow(stride), c10::fromIntArrayRefSlow(padding), c10::fromIntArrayRefSlow(dilation), out);
|
| 50 |
+
}
|
| 51 |
+
namespace symint {
|
| 52 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, int64_t>::value>>
|
| 53 |
+
at::Tensor & conv_depthwise3d_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & weight, at::IntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, at::IntArrayRef stride, at::IntArrayRef padding, at::IntArrayRef dilation) {
|
| 54 |
+
return at::_ops::conv_depthwise3d_out::call(self, weight, c10::fromIntArrayRefSlow(kernel_size), bias, c10::fromIntArrayRefSlow(stride), c10::fromIntArrayRefSlow(padding), c10::fromIntArrayRefSlow(dilation), out);
|
| 55 |
+
}
|
| 56 |
+
}
|
| 57 |
+
|
| 58 |
+
// aten::conv_depthwise3d.out(Tensor self, Tensor weight, SymInt[3] kernel_size, Tensor? bias, SymInt[3] stride, SymInt[3] padding, SymInt[3] dilation, *, Tensor(a!) out) -> Tensor(a!)
|
| 59 |
+
inline at::Tensor & conv_depthwise3d_outf(const at::Tensor & self, const at::Tensor & weight, at::IntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, at::IntArrayRef stride, at::IntArrayRef padding, at::IntArrayRef dilation, at::Tensor & out) {
|
| 60 |
+
return at::_ops::conv_depthwise3d_out::call(self, weight, c10::fromIntArrayRefSlow(kernel_size), bias, c10::fromIntArrayRefSlow(stride), c10::fromIntArrayRefSlow(padding), c10::fromIntArrayRefSlow(dilation), out);
|
| 61 |
+
}
|
| 62 |
+
namespace symint {
|
| 63 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, int64_t>::value>>
|
| 64 |
+
at::Tensor & conv_depthwise3d_outf(const at::Tensor & self, const at::Tensor & weight, at::IntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, at::IntArrayRef stride, at::IntArrayRef padding, at::IntArrayRef dilation, at::Tensor & out) {
|
| 65 |
+
return at::_ops::conv_depthwise3d_out::call(self, weight, c10::fromIntArrayRefSlow(kernel_size), bias, c10::fromIntArrayRefSlow(stride), c10::fromIntArrayRefSlow(padding), c10::fromIntArrayRefSlow(dilation), out);
|
| 66 |
+
}
|
| 67 |
+
}
|
| 68 |
+
|
| 69 |
+
// aten::conv_depthwise3d.out(Tensor self, Tensor weight, SymInt[3] kernel_size, Tensor? bias, SymInt[3] stride, SymInt[3] padding, SymInt[3] dilation, *, Tensor(a!) out) -> Tensor(a!)
|
| 70 |
+
inline at::Tensor & conv_depthwise3d_symint_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & weight, c10::SymIntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, c10::SymIntArrayRef stride, c10::SymIntArrayRef padding, c10::SymIntArrayRef dilation) {
|
| 71 |
+
return at::_ops::conv_depthwise3d_out::call(self, weight, kernel_size, bias, stride, padding, dilation, out);
|
| 72 |
+
}
|
| 73 |
+
namespace symint {
|
| 74 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, c10::SymInt>::value>>
|
| 75 |
+
at::Tensor & conv_depthwise3d_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & weight, c10::SymIntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, c10::SymIntArrayRef stride, c10::SymIntArrayRef padding, c10::SymIntArrayRef dilation) {
|
| 76 |
+
return at::_ops::conv_depthwise3d_out::call(self, weight, kernel_size, bias, stride, padding, dilation, out);
|
| 77 |
+
}
|
| 78 |
+
}
|
| 79 |
+
|
| 80 |
+
// aten::conv_depthwise3d.out(Tensor self, Tensor weight, SymInt[3] kernel_size, Tensor? bias, SymInt[3] stride, SymInt[3] padding, SymInt[3] dilation, *, Tensor(a!) out) -> Tensor(a!)
|
| 81 |
+
inline at::Tensor & conv_depthwise3d_symint_outf(const at::Tensor & self, const at::Tensor & weight, c10::SymIntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, c10::SymIntArrayRef stride, c10::SymIntArrayRef padding, c10::SymIntArrayRef dilation, at::Tensor & out) {
|
| 82 |
+
return at::_ops::conv_depthwise3d_out::call(self, weight, kernel_size, bias, stride, padding, dilation, out);
|
| 83 |
+
}
|
| 84 |
+
namespace symint {
|
| 85 |
+
template <typename T, typename = std::enable_if_t<std::is_same<T, c10::SymInt>::value>>
|
| 86 |
+
at::Tensor & conv_depthwise3d_outf(const at::Tensor & self, const at::Tensor & weight, c10::SymIntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, c10::SymIntArrayRef stride, c10::SymIntArrayRef padding, c10::SymIntArrayRef dilation, at::Tensor & out) {
|
| 87 |
+
return at::_ops::conv_depthwise3d_out::call(self, weight, kernel_size, bias, stride, padding, dilation, out);
|
| 88 |
+
}
|
| 89 |
+
}
|
| 90 |
+
|
| 91 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/cudnn_is_acceptable_compositeimplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeimplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API bool cudnn_is_acceptable(const at::Tensor & self);
|
| 21 |
+
|
| 22 |
+
} // namespace compositeimplicitautograd
|
| 23 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/diagonal_scatter_native.h
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor & diagonal_scatter_out(const at::Tensor & self, const at::Tensor & src, int64_t offset, int64_t dim1, int64_t dim2, at::Tensor & out);
|
| 20 |
+
TORCH_API at::Tensor diagonal_scatter(const at::Tensor & self, const at::Tensor & src, int64_t offset=0, int64_t dim1=0, int64_t dim2=1);
|
| 21 |
+
} // namespace native
|
| 22 |
+
} // namespace at
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/dropout.h
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <c10/util/Optional.h>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/dropout_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::dropout(Tensor input, float p, bool train) -> Tensor
|
| 26 |
+
inline at::Tensor dropout(const at::Tensor & input, double p, bool train) {
|
| 27 |
+
return at::_ops::dropout::call(input, p, train);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::dropout_(Tensor(a!) self, float p, bool train) -> Tensor(a!)
|
| 31 |
+
inline at::Tensor & dropout_(at::Tensor & self, double p, bool train) {
|
| 32 |
+
return at::_ops::dropout_::call(self, p, train);
|
| 33 |
+
}
|
| 34 |
+
|
| 35 |
+
}
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/expm1_ops.h
ADDED
|
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API expm1 {
|
| 18 |
+
using schema = at::Tensor (const at::Tensor &);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::expm1")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "expm1(Tensor self) -> Tensor")
|
| 24 |
+
static at::Tensor call(const at::Tensor & self);
|
| 25 |
+
static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
struct TORCH_API expm1_ {
|
| 29 |
+
using schema = at::Tensor & (at::Tensor &);
|
| 30 |
+
using ptr_schema = schema*;
|
| 31 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 32 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::expm1_")
|
| 33 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 34 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "expm1_(Tensor(a!) self) -> Tensor(a!)")
|
| 35 |
+
static at::Tensor & call(at::Tensor & self);
|
| 36 |
+
static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, at::Tensor & self);
|
| 37 |
+
};
|
| 38 |
+
|
| 39 |
+
struct TORCH_API expm1_out {
|
| 40 |
+
using schema = at::Tensor & (const at::Tensor &, at::Tensor &);
|
| 41 |
+
using ptr_schema = schema*;
|
| 42 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 43 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::expm1")
|
| 44 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out")
|
| 45 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "expm1.out(Tensor self, *, Tensor(a!) out) -> Tensor(a!)")
|
| 46 |
+
static at::Tensor & call(const at::Tensor & self, at::Tensor & out);
|
| 47 |
+
static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, at::Tensor & out);
|
| 48 |
+
};
|
| 49 |
+
|
| 50 |
+
}} // namespace at::_ops
|
parrot/lib/python3.10/site-packages/torch/include/ATen/ops/fake_quantize_per_channel_affine_native.h
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <c10/util/Optional.h>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor fake_quantize_per_channel_affine(const at::Tensor & self, const at::Tensor & scale, const at::Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max);
|
| 20 |
+
} // namespace native
|
| 21 |
+
} // namespace at
|