Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- llava/lib/python3.10/site-packages/pip/_internal/cli/__init__.py +4 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/index_command.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/parser.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/autocompletion.py +176 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/base_command.py +240 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/cmdoptions.py +1075 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/command_context.py +27 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/index_command.py +171 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/main.py +80 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/main_parser.py +134 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/parser.py +294 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/progress_bars.py +94 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/req_command.py +329 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/spinners.py +159 -0
- llava/lib/python3.10/site-packages/pip/_internal/cli/status_codes.py +6 -0
- llava/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/__init__.py +116 -0
- llava/lib/python3.10/site-packages/pip/_vendor/platformdirs/__main__.py +55 -0
- llava/lib/python3.10/site-packages/pip/_vendor/vendor.txt +18 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/__init__.py +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/aggregate.py +365 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/arrow_block.py +650 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/batcher.py +325 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/block_builder.py +39 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/block_list.py +98 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/compute.py +151 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/delegating_block_builder.py +76 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/equalize.py +142 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/__init__.py +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/autoscaling_requester.py +131 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/bundle_queue/__init__.py +9 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/bundle_queue/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/bundle_queue/__pycache__/bundle_queue.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/bundle_queue/__pycache__/fifo_bundle_queue.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/bundle_queue/bundle_queue.py +62 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/bundle_queue/fifo_bundle_queue.py +129 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/interfaces/__init__.py +19 -0
llava/lib/python3.10/site-packages/pip/_internal/cli/__init__.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Subpackage containing all of pip's command line interface related code
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
# This file intentionally does not import submodules
|
llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (251 Bytes). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc
ADDED
|
Binary file (5.43 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-310.pyc
ADDED
|
Binary file (6.55 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc
ADDED
|
Binary file (23.5 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-310.pyc
ADDED
|
Binary file (1.29 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/index_command.cpython-310.pyc
ADDED
|
Binary file (4.94 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main.cpython-310.pyc
ADDED
|
Binary file (1.49 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc
ADDED
|
Binary file (2.97 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/parser.cpython-310.pyc
ADDED
|
Binary file (9.96 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc
ADDED
|
Binary file (2.61 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-310.pyc
ADDED
|
Binary file (8.69 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-310.pyc
ADDED
|
Binary file (4.94 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc
ADDED
|
Binary file (330 Bytes). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/cli/autocompletion.py
ADDED
|
@@ -0,0 +1,176 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Logic that powers autocompletion installed by ``pip completion``.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import optparse
|
| 5 |
+
import os
|
| 6 |
+
import sys
|
| 7 |
+
from itertools import chain
|
| 8 |
+
from typing import Any, Iterable, List, Optional
|
| 9 |
+
|
| 10 |
+
from pip._internal.cli.main_parser import create_main_parser
|
| 11 |
+
from pip._internal.commands import commands_dict, create_command
|
| 12 |
+
from pip._internal.metadata import get_default_environment
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def autocomplete() -> None:
|
| 16 |
+
"""Entry Point for completion of main and subcommand options."""
|
| 17 |
+
# Don't complete if user hasn't sourced bash_completion file.
|
| 18 |
+
if "PIP_AUTO_COMPLETE" not in os.environ:
|
| 19 |
+
return
|
| 20 |
+
# Don't complete if autocompletion environment variables
|
| 21 |
+
# are not present
|
| 22 |
+
if not os.environ.get("COMP_WORDS") or not os.environ.get("COMP_CWORD"):
|
| 23 |
+
return
|
| 24 |
+
cwords = os.environ["COMP_WORDS"].split()[1:]
|
| 25 |
+
cword = int(os.environ["COMP_CWORD"])
|
| 26 |
+
try:
|
| 27 |
+
current = cwords[cword - 1]
|
| 28 |
+
except IndexError:
|
| 29 |
+
current = ""
|
| 30 |
+
|
| 31 |
+
parser = create_main_parser()
|
| 32 |
+
subcommands = list(commands_dict)
|
| 33 |
+
options = []
|
| 34 |
+
|
| 35 |
+
# subcommand
|
| 36 |
+
subcommand_name: Optional[str] = None
|
| 37 |
+
for word in cwords:
|
| 38 |
+
if word in subcommands:
|
| 39 |
+
subcommand_name = word
|
| 40 |
+
break
|
| 41 |
+
# subcommand options
|
| 42 |
+
if subcommand_name is not None:
|
| 43 |
+
# special case: 'help' subcommand has no options
|
| 44 |
+
if subcommand_name == "help":
|
| 45 |
+
sys.exit(1)
|
| 46 |
+
# special case: list locally installed dists for show and uninstall
|
| 47 |
+
should_list_installed = not current.startswith("-") and subcommand_name in [
|
| 48 |
+
"show",
|
| 49 |
+
"uninstall",
|
| 50 |
+
]
|
| 51 |
+
if should_list_installed:
|
| 52 |
+
env = get_default_environment()
|
| 53 |
+
lc = current.lower()
|
| 54 |
+
installed = [
|
| 55 |
+
dist.canonical_name
|
| 56 |
+
for dist in env.iter_installed_distributions(local_only=True)
|
| 57 |
+
if dist.canonical_name.startswith(lc)
|
| 58 |
+
and dist.canonical_name not in cwords[1:]
|
| 59 |
+
]
|
| 60 |
+
# if there are no dists installed, fall back to option completion
|
| 61 |
+
if installed:
|
| 62 |
+
for dist in installed:
|
| 63 |
+
print(dist)
|
| 64 |
+
sys.exit(1)
|
| 65 |
+
|
| 66 |
+
should_list_installables = (
|
| 67 |
+
not current.startswith("-") and subcommand_name == "install"
|
| 68 |
+
)
|
| 69 |
+
if should_list_installables:
|
| 70 |
+
for path in auto_complete_paths(current, "path"):
|
| 71 |
+
print(path)
|
| 72 |
+
sys.exit(1)
|
| 73 |
+
|
| 74 |
+
subcommand = create_command(subcommand_name)
|
| 75 |
+
|
| 76 |
+
for opt in subcommand.parser.option_list_all:
|
| 77 |
+
if opt.help != optparse.SUPPRESS_HELP:
|
| 78 |
+
options += [
|
| 79 |
+
(opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts
|
| 80 |
+
]
|
| 81 |
+
|
| 82 |
+
# filter out previously specified options from available options
|
| 83 |
+
prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
|
| 84 |
+
options = [(x, v) for (x, v) in options if x not in prev_opts]
|
| 85 |
+
# filter options by current input
|
| 86 |
+
options = [(k, v) for k, v in options if k.startswith(current)]
|
| 87 |
+
# get completion type given cwords and available subcommand options
|
| 88 |
+
completion_type = get_path_completion_type(
|
| 89 |
+
cwords,
|
| 90 |
+
cword,
|
| 91 |
+
subcommand.parser.option_list_all,
|
| 92 |
+
)
|
| 93 |
+
# get completion files and directories if ``completion_type`` is
|
| 94 |
+
# ``<file>``, ``<dir>`` or ``<path>``
|
| 95 |
+
if completion_type:
|
| 96 |
+
paths = auto_complete_paths(current, completion_type)
|
| 97 |
+
options = [(path, 0) for path in paths]
|
| 98 |
+
for option in options:
|
| 99 |
+
opt_label = option[0]
|
| 100 |
+
# append '=' to options which require args
|
| 101 |
+
if option[1] and option[0][:2] == "--":
|
| 102 |
+
opt_label += "="
|
| 103 |
+
print(opt_label)
|
| 104 |
+
else:
|
| 105 |
+
# show main parser options only when necessary
|
| 106 |
+
|
| 107 |
+
opts = [i.option_list for i in parser.option_groups]
|
| 108 |
+
opts.append(parser.option_list)
|
| 109 |
+
flattened_opts = chain.from_iterable(opts)
|
| 110 |
+
if current.startswith("-"):
|
| 111 |
+
for opt in flattened_opts:
|
| 112 |
+
if opt.help != optparse.SUPPRESS_HELP:
|
| 113 |
+
subcommands += opt._long_opts + opt._short_opts
|
| 114 |
+
else:
|
| 115 |
+
# get completion type given cwords and all available options
|
| 116 |
+
completion_type = get_path_completion_type(cwords, cword, flattened_opts)
|
| 117 |
+
if completion_type:
|
| 118 |
+
subcommands = list(auto_complete_paths(current, completion_type))
|
| 119 |
+
|
| 120 |
+
print(" ".join([x for x in subcommands if x.startswith(current)]))
|
| 121 |
+
sys.exit(1)
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
def get_path_completion_type(
|
| 125 |
+
cwords: List[str], cword: int, opts: Iterable[Any]
|
| 126 |
+
) -> Optional[str]:
|
| 127 |
+
"""Get the type of path completion (``file``, ``dir``, ``path`` or None)
|
| 128 |
+
|
| 129 |
+
:param cwords: same as the environmental variable ``COMP_WORDS``
|
| 130 |
+
:param cword: same as the environmental variable ``COMP_CWORD``
|
| 131 |
+
:param opts: The available options to check
|
| 132 |
+
:return: path completion type (``file``, ``dir``, ``path`` or None)
|
| 133 |
+
"""
|
| 134 |
+
if cword < 2 or not cwords[cword - 2].startswith("-"):
|
| 135 |
+
return None
|
| 136 |
+
for opt in opts:
|
| 137 |
+
if opt.help == optparse.SUPPRESS_HELP:
|
| 138 |
+
continue
|
| 139 |
+
for o in str(opt).split("/"):
|
| 140 |
+
if cwords[cword - 2].split("=")[0] == o:
|
| 141 |
+
if not opt.metavar or any(
|
| 142 |
+
x in ("path", "file", "dir") for x in opt.metavar.split("/")
|
| 143 |
+
):
|
| 144 |
+
return opt.metavar
|
| 145 |
+
return None
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
|
| 149 |
+
"""If ``completion_type`` is ``file`` or ``path``, list all regular files
|
| 150 |
+
and directories starting with ``current``; otherwise only list directories
|
| 151 |
+
starting with ``current``.
|
| 152 |
+
|
| 153 |
+
:param current: The word to be completed
|
| 154 |
+
:param completion_type: path completion type(``file``, ``path`` or ``dir``)
|
| 155 |
+
:return: A generator of regular files and/or directories
|
| 156 |
+
"""
|
| 157 |
+
directory, filename = os.path.split(current)
|
| 158 |
+
current_path = os.path.abspath(directory)
|
| 159 |
+
# Don't complete paths if they can't be accessed
|
| 160 |
+
if not os.access(current_path, os.R_OK):
|
| 161 |
+
return
|
| 162 |
+
filename = os.path.normcase(filename)
|
| 163 |
+
# list all files that start with ``filename``
|
| 164 |
+
file_list = (
|
| 165 |
+
x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
|
| 166 |
+
)
|
| 167 |
+
for f in file_list:
|
| 168 |
+
opt = os.path.join(current_path, f)
|
| 169 |
+
comp_file = os.path.normcase(os.path.join(directory, f))
|
| 170 |
+
# complete regular files when there is not ``<dir>`` after option
|
| 171 |
+
# complete directories when there is ``<file>``, ``<path>`` or
|
| 172 |
+
# ``<dir>``after option
|
| 173 |
+
if completion_type != "dir" and os.path.isfile(opt):
|
| 174 |
+
yield comp_file
|
| 175 |
+
elif os.path.isdir(opt):
|
| 176 |
+
yield os.path.join(comp_file, "")
|
llava/lib/python3.10/site-packages/pip/_internal/cli/base_command.py
ADDED
|
@@ -0,0 +1,240 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Base Command class, and related routines"""
|
| 2 |
+
|
| 3 |
+
import logging
|
| 4 |
+
import logging.config
|
| 5 |
+
import optparse
|
| 6 |
+
import os
|
| 7 |
+
import sys
|
| 8 |
+
import traceback
|
| 9 |
+
from optparse import Values
|
| 10 |
+
from typing import List, Optional, Tuple
|
| 11 |
+
|
| 12 |
+
from pip._vendor.rich import reconfigure
|
| 13 |
+
from pip._vendor.rich import traceback as rich_traceback
|
| 14 |
+
|
| 15 |
+
from pip._internal.cli import cmdoptions
|
| 16 |
+
from pip._internal.cli.command_context import CommandContextMixIn
|
| 17 |
+
from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
|
| 18 |
+
from pip._internal.cli.status_codes import (
|
| 19 |
+
ERROR,
|
| 20 |
+
PREVIOUS_BUILD_DIR_ERROR,
|
| 21 |
+
UNKNOWN_ERROR,
|
| 22 |
+
VIRTUALENV_NOT_FOUND,
|
| 23 |
+
)
|
| 24 |
+
from pip._internal.exceptions import (
|
| 25 |
+
BadCommand,
|
| 26 |
+
CommandError,
|
| 27 |
+
DiagnosticPipError,
|
| 28 |
+
InstallationError,
|
| 29 |
+
NetworkConnectionError,
|
| 30 |
+
PreviousBuildDirError,
|
| 31 |
+
)
|
| 32 |
+
from pip._internal.utils.deprecation import deprecated
|
| 33 |
+
from pip._internal.utils.filesystem import check_path_owner
|
| 34 |
+
from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
|
| 35 |
+
from pip._internal.utils.misc import get_prog, normalize_path
|
| 36 |
+
from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
|
| 37 |
+
from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
|
| 38 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
| 39 |
+
|
| 40 |
+
__all__ = ["Command"]
|
| 41 |
+
|
| 42 |
+
logger = logging.getLogger(__name__)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class Command(CommandContextMixIn):
|
| 46 |
+
usage: str = ""
|
| 47 |
+
ignore_require_venv: bool = False
|
| 48 |
+
|
| 49 |
+
def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
|
| 50 |
+
super().__init__()
|
| 51 |
+
|
| 52 |
+
self.name = name
|
| 53 |
+
self.summary = summary
|
| 54 |
+
self.parser = ConfigOptionParser(
|
| 55 |
+
usage=self.usage,
|
| 56 |
+
prog=f"{get_prog()} {name}",
|
| 57 |
+
formatter=UpdatingDefaultsHelpFormatter(),
|
| 58 |
+
add_help_option=False,
|
| 59 |
+
name=name,
|
| 60 |
+
description=self.__doc__,
|
| 61 |
+
isolated=isolated,
|
| 62 |
+
)
|
| 63 |
+
|
| 64 |
+
self.tempdir_registry: Optional[TempDirRegistry] = None
|
| 65 |
+
|
| 66 |
+
# Commands should add options to this option group
|
| 67 |
+
optgroup_name = f"{self.name.capitalize()} Options"
|
| 68 |
+
self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
|
| 69 |
+
|
| 70 |
+
# Add the general options
|
| 71 |
+
gen_opts = cmdoptions.make_option_group(
|
| 72 |
+
cmdoptions.general_group,
|
| 73 |
+
self.parser,
|
| 74 |
+
)
|
| 75 |
+
self.parser.add_option_group(gen_opts)
|
| 76 |
+
|
| 77 |
+
self.add_options()
|
| 78 |
+
|
| 79 |
+
def add_options(self) -> None:
|
| 80 |
+
pass
|
| 81 |
+
|
| 82 |
+
def handle_pip_version_check(self, options: Values) -> None:
|
| 83 |
+
"""
|
| 84 |
+
This is a no-op so that commands by default do not do the pip version
|
| 85 |
+
check.
|
| 86 |
+
"""
|
| 87 |
+
# Make sure we do the pip version check if the index_group options
|
| 88 |
+
# are present.
|
| 89 |
+
assert not hasattr(options, "no_index")
|
| 90 |
+
|
| 91 |
+
def run(self, options: Values, args: List[str]) -> int:
|
| 92 |
+
raise NotImplementedError
|
| 93 |
+
|
| 94 |
+
def _run_wrapper(self, level_number: int, options: Values, args: List[str]) -> int:
|
| 95 |
+
def _inner_run() -> int:
|
| 96 |
+
try:
|
| 97 |
+
return self.run(options, args)
|
| 98 |
+
finally:
|
| 99 |
+
self.handle_pip_version_check(options)
|
| 100 |
+
|
| 101 |
+
if options.debug_mode:
|
| 102 |
+
rich_traceback.install(show_locals=True)
|
| 103 |
+
return _inner_run()
|
| 104 |
+
|
| 105 |
+
try:
|
| 106 |
+
status = _inner_run()
|
| 107 |
+
assert isinstance(status, int)
|
| 108 |
+
return status
|
| 109 |
+
except DiagnosticPipError as exc:
|
| 110 |
+
logger.error("%s", exc, extra={"rich": True})
|
| 111 |
+
logger.debug("Exception information:", exc_info=True)
|
| 112 |
+
|
| 113 |
+
return ERROR
|
| 114 |
+
except PreviousBuildDirError as exc:
|
| 115 |
+
logger.critical(str(exc))
|
| 116 |
+
logger.debug("Exception information:", exc_info=True)
|
| 117 |
+
|
| 118 |
+
return PREVIOUS_BUILD_DIR_ERROR
|
| 119 |
+
except (
|
| 120 |
+
InstallationError,
|
| 121 |
+
BadCommand,
|
| 122 |
+
NetworkConnectionError,
|
| 123 |
+
) as exc:
|
| 124 |
+
logger.critical(str(exc))
|
| 125 |
+
logger.debug("Exception information:", exc_info=True)
|
| 126 |
+
|
| 127 |
+
return ERROR
|
| 128 |
+
except CommandError as exc:
|
| 129 |
+
logger.critical("%s", exc)
|
| 130 |
+
logger.debug("Exception information:", exc_info=True)
|
| 131 |
+
|
| 132 |
+
return ERROR
|
| 133 |
+
except BrokenStdoutLoggingError:
|
| 134 |
+
# Bypass our logger and write any remaining messages to
|
| 135 |
+
# stderr because stdout no longer works.
|
| 136 |
+
print("ERROR: Pipe to stdout was broken", file=sys.stderr)
|
| 137 |
+
if level_number <= logging.DEBUG:
|
| 138 |
+
traceback.print_exc(file=sys.stderr)
|
| 139 |
+
|
| 140 |
+
return ERROR
|
| 141 |
+
except KeyboardInterrupt:
|
| 142 |
+
logger.critical("Operation cancelled by user")
|
| 143 |
+
logger.debug("Exception information:", exc_info=True)
|
| 144 |
+
|
| 145 |
+
return ERROR
|
| 146 |
+
except BaseException:
|
| 147 |
+
logger.critical("Exception:", exc_info=True)
|
| 148 |
+
|
| 149 |
+
return UNKNOWN_ERROR
|
| 150 |
+
|
| 151 |
+
def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
|
| 152 |
+
# factored out for testability
|
| 153 |
+
return self.parser.parse_args(args)
|
| 154 |
+
|
| 155 |
+
def main(self, args: List[str]) -> int:
|
| 156 |
+
try:
|
| 157 |
+
with self.main_context():
|
| 158 |
+
return self._main(args)
|
| 159 |
+
finally:
|
| 160 |
+
logging.shutdown()
|
| 161 |
+
|
| 162 |
+
def _main(self, args: List[str]) -> int:
|
| 163 |
+
# We must initialize this before the tempdir manager, otherwise the
|
| 164 |
+
# configuration would not be accessible by the time we clean up the
|
| 165 |
+
# tempdir manager.
|
| 166 |
+
self.tempdir_registry = self.enter_context(tempdir_registry())
|
| 167 |
+
# Intentionally set as early as possible so globally-managed temporary
|
| 168 |
+
# directories are available to the rest of the code.
|
| 169 |
+
self.enter_context(global_tempdir_manager())
|
| 170 |
+
|
| 171 |
+
options, args = self.parse_args(args)
|
| 172 |
+
|
| 173 |
+
# Set verbosity so that it can be used elsewhere.
|
| 174 |
+
self.verbosity = options.verbose - options.quiet
|
| 175 |
+
|
| 176 |
+
reconfigure(no_color=options.no_color)
|
| 177 |
+
level_number = setup_logging(
|
| 178 |
+
verbosity=self.verbosity,
|
| 179 |
+
no_color=options.no_color,
|
| 180 |
+
user_log_file=options.log,
|
| 181 |
+
)
|
| 182 |
+
|
| 183 |
+
always_enabled_features = set(options.features_enabled) & set(
|
| 184 |
+
cmdoptions.ALWAYS_ENABLED_FEATURES
|
| 185 |
+
)
|
| 186 |
+
if always_enabled_features:
|
| 187 |
+
logger.warning(
|
| 188 |
+
"The following features are always enabled: %s. ",
|
| 189 |
+
", ".join(sorted(always_enabled_features)),
|
| 190 |
+
)
|
| 191 |
+
|
| 192 |
+
# Make sure that the --python argument isn't specified after the
|
| 193 |
+
# subcommand. We can tell, because if --python was specified,
|
| 194 |
+
# we should only reach this point if we're running in the created
|
| 195 |
+
# subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment
|
| 196 |
+
# variable set.
|
| 197 |
+
if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
|
| 198 |
+
logger.critical(
|
| 199 |
+
"The --python option must be placed before the pip subcommand name"
|
| 200 |
+
)
|
| 201 |
+
sys.exit(ERROR)
|
| 202 |
+
|
| 203 |
+
# TODO: Try to get these passing down from the command?
|
| 204 |
+
# without resorting to os.environ to hold these.
|
| 205 |
+
# This also affects isolated builds and it should.
|
| 206 |
+
|
| 207 |
+
if options.no_input:
|
| 208 |
+
os.environ["PIP_NO_INPUT"] = "1"
|
| 209 |
+
|
| 210 |
+
if options.exists_action:
|
| 211 |
+
os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)
|
| 212 |
+
|
| 213 |
+
if options.require_venv and not self.ignore_require_venv:
|
| 214 |
+
# If a venv is required check if it can really be found
|
| 215 |
+
if not running_under_virtualenv():
|
| 216 |
+
logger.critical("Could not find an activated virtualenv (required).")
|
| 217 |
+
sys.exit(VIRTUALENV_NOT_FOUND)
|
| 218 |
+
|
| 219 |
+
if options.cache_dir:
|
| 220 |
+
options.cache_dir = normalize_path(options.cache_dir)
|
| 221 |
+
if not check_path_owner(options.cache_dir):
|
| 222 |
+
logger.warning(
|
| 223 |
+
"The directory '%s' or its parent directory is not owned "
|
| 224 |
+
"or is not writable by the current user. The cache "
|
| 225 |
+
"has been disabled. Check the permissions and owner of "
|
| 226 |
+
"that directory. If executing pip with sudo, you should "
|
| 227 |
+
"use sudo's -H flag.",
|
| 228 |
+
options.cache_dir,
|
| 229 |
+
)
|
| 230 |
+
options.cache_dir = None
|
| 231 |
+
|
| 232 |
+
if options.no_python_version_warning:
|
| 233 |
+
deprecated(
|
| 234 |
+
reason="--no-python-version-warning is deprecated.",
|
| 235 |
+
replacement="to remove the flag as it's a no-op",
|
| 236 |
+
gone_in="25.1",
|
| 237 |
+
issue=13154,
|
| 238 |
+
)
|
| 239 |
+
|
| 240 |
+
return self._run_wrapper(level_number, options, args)
|
llava/lib/python3.10/site-packages/pip/_internal/cli/cmdoptions.py
ADDED
|
@@ -0,0 +1,1075 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
shared options and groups
|
| 3 |
+
|
| 4 |
+
The principle here is to define options once, but *not* instantiate them
|
| 5 |
+
globally. One reason being that options with action='append' can carry state
|
| 6 |
+
between parses. pip parses general options twice internally, and shouldn't
|
| 7 |
+
pass on state. To be consistent, all options will follow this design.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
# The following comment should be removed at some point in the future.
|
| 11 |
+
# mypy: strict-optional=False
|
| 12 |
+
|
| 13 |
+
import importlib.util
|
| 14 |
+
import logging
|
| 15 |
+
import os
|
| 16 |
+
import textwrap
|
| 17 |
+
from functools import partial
|
| 18 |
+
from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
|
| 19 |
+
from textwrap import dedent
|
| 20 |
+
from typing import Any, Callable, Dict, Optional, Tuple
|
| 21 |
+
|
| 22 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
| 23 |
+
|
| 24 |
+
from pip._internal.cli.parser import ConfigOptionParser
|
| 25 |
+
from pip._internal.exceptions import CommandError
|
| 26 |
+
from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
|
| 27 |
+
from pip._internal.models.format_control import FormatControl
|
| 28 |
+
from pip._internal.models.index import PyPI
|
| 29 |
+
from pip._internal.models.target_python import TargetPython
|
| 30 |
+
from pip._internal.utils.hashes import STRONG_HASHES
|
| 31 |
+
from pip._internal.utils.misc import strtobool
|
| 32 |
+
|
| 33 |
+
logger = logging.getLogger(__name__)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
    """
    Abort option parsing via ``parser.error()`` with a readable message.

    Args:
        parser: an OptionParser instance.
        option: an Option instance.
        msg: the error text.
    """
    # Prefix with the offending option, collapse runs of whitespace,
    # then re-wrap the result to a terminal-friendly width.
    collapsed = " ".join(f"{option} error: {msg}".split())
    parser.error(textwrap.fill(collapsed))
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
    """
    Build and return an ``OptionGroup`` from a group description.

    group -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    """
    result = OptionGroup(parser, group["name"])
    # Each entry is a zero-argument factory; instantiate fresh Options so
    # no state is shared between parses (see module docstring).
    for make_option in group["options"]:
        result.add_option(make_option())
    return result
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def check_dist_restriction(options: Values, check_target: bool = False) -> None:
    """Function for determining if custom platform options are allowed.

    :param options: The OptionParser options.
    :param check_target: Whether or not to check if --target is being used.
    :raises CommandError: when dist-restricting options are combined with
        settings that could pull in locally-incompatible distributions.
    """
    restrictions = (
        options.python_version,
        options.platforms,
        options.abis,
        options.implementation,
    )
    dist_restriction_set = any(restrictions)

    # "Only binary for everything" means no sdists can ever be selected.
    binary_only = FormatControl(set(), {":all:"})
    sdist_dependencies_allowed = (
        options.format_control != binary_only and not options.ignore_dependencies
    )

    # Installations or downloads using dist restrictions must not combine
    # source distributions and dist-specific wheels, as they are not
    # guaranteed to be locally compatible.
    if dist_restriction_set and sdist_dependencies_allowed:
        raise CommandError(
            "When restricting platform and interpreter constraints using "
            "--python-version, --platform, --abi, or --implementation, "
            "either --no-deps must be set, or --only-binary=:all: must be "
            "set and --no-binary must not be set (or must be set to "
            ":none:)."
        )

    if check_target and dist_restriction_set:
        # Restricted dists can only be staged into --target (or inspected
        # via --dry-run); they may not match the running interpreter.
        if not options.dry_run and not options.target_dir:
            raise CommandError(
                "Can not use any platform or abi specific options unless "
                "installing via '--target' or using '--dry-run'"
            )
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def _path_option_check(option: Option, opt: str, value: str) -> str:
|
| 103 |
+
return os.path.expanduser(value)
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def _package_name_option_check(option: Option, opt: str, value: str) -> str:
    """Type checker for the "package_name" option type: canonicalize the name."""
    normalized = canonicalize_name(value)
    return normalized
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
class PipOption(Option):
    """optparse Option subclass adding two pip-specific argument types.

    "path" expands a leading ``~`` via _path_option_check; "package_name"
    canonicalizes a project name via _package_name_option_check.
    """

    TYPES = Option.TYPES + ("path", "package_name")
    # Copy so the base class's checker table is not mutated.
    TYPE_CHECKER = Option.TYPE_CHECKER.copy()
    TYPE_CHECKER["package_name"] = _package_name_option_check
    TYPE_CHECKER["path"] = _path_option_check
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
###########
# options #
###########

# -h/--help: standard optparse help action.
help_: Callable[..., Option] = partial(
    Option,
    "-h",
    "--help",
    dest="help",
    action="help",
    help="Show help.",
)

# --debug: let exceptions escape instead of being logged to stderr.
debug_mode: Callable[..., Option] = partial(
    Option,
    "--debug",
    dest="debug_mode",
    action="store_true",
    default=False,
    help=(
        "Let unhandled exceptions propagate outside the main subroutine, "
        "instead of logging them to stderr."
    ),
)

# --isolated: ignore environment variables and user configuration.
isolated_mode: Callable[..., Option] = partial(
    Option,
    "--isolated",
    dest="isolated_mode",
    action="store_true",
    default=False,
    help=(
        "Run pip in an isolated mode, ignoring environment variables and user "
        "configuration."
    ),
)

# --require-virtualenv: refuse to run outside a virtual environment.
require_virtualenv: Callable[..., Option] = partial(
    Option,
    "--require-virtualenv",
    "--require-venv",
    dest="require_venv",
    action="store_true",
    default=False,
    help=(
        "Allow pip to only run in a virtual environment; "
        "exit with an error otherwise."
    ),
)

# --break-system-packages: override an EXTERNALLY-MANAGED marker (PEP 668).
override_externally_managed: Callable[..., Option] = partial(
    Option,
    "--break-system-packages",
    dest="override_externally_managed",
    action="store_true",
    help="Allow pip to modify an EXTERNALLY-MANAGED Python installation",
)

# --python: re-invoke pip under a different interpreter.
python: Callable[..., Option] = partial(
    Option,
    "--python",
    dest="python",
    help="Run pip with the specified Python interpreter.",
)

# -v/--verbose: additive verbosity counter (up to 3).
verbose: Callable[..., Option] = partial(
    Option,
    "-v",
    "--verbose",
    dest="verbose",
    action="count",
    default=0,
    help="Give more output. Option is additive, and can be used up to 3 times.",
)

# --no-color: plain, uncolored terminal output.
no_color: Callable[..., Option] = partial(
    Option,
    "--no-color",
    dest="no_color",
    action="store_true",
    default=False,
    help="Suppress colored output.",
)

# -V/--version: print pip's version and exit.
version: Callable[..., Option] = partial(
    Option,
    "-V",
    "--version",
    dest="version",
    action="store_true",
    help="Show version and exit.",
)

# -q/--quiet: additive quietness counter (up to 3).
quiet: Callable[..., Option] = partial(
    Option,
    "-q",
    "--quiet",
    dest="quiet",
    action="count",
    default=0,
    help=(
        "Give less output. Option is additive, and can be used up to 3"
        " times (corresponding to WARNING, ERROR, and CRITICAL logging"
        " levels)."
    ),
)

# --progress-bar: progress display mode.
progress_bar: Callable[..., Option] = partial(
    Option,
    "--progress-bar",
    dest="progress_bar",
    type="choice",
    choices=["on", "off", "raw"],
    default="on",
    help="Specify whether the progress bar should be used [on, off, raw] (default: on)",
)

# --log: append verbose output to a file ("path" type expands ~).
log: Callable[..., Option] = partial(
    PipOption,
    "--log",
    "--log-file",
    "--local-log",
    dest="log",
    metavar="path",
    type="path",
    help="Path to a verbose appending log.",
)

# --no-input: never prompt interactively.
no_input: Callable[..., Option] = partial(
    Option,
    # Don't ask for input
    "--no-input",
    dest="no_input",
    action="store_true",
    default=False,
    help="Disable prompting for input.",
)

# --keyring-provider: mechanism for credential lookup.
keyring_provider: Callable[..., Option] = partial(
    Option,
    "--keyring-provider",
    dest="keyring_provider",
    choices=["auto", "disabled", "import", "subprocess"],
    default="auto",
    help=(
        "Enable the credential lookup via the keyring library if user input is allowed."
        " Specify which mechanism to use [auto, disabled, import, subprocess]."
        " (default: %default)"
    ),
)

# --proxy: HTTP(S) proxy URL.
proxy: Callable[..., Option] = partial(
    Option,
    "--proxy",
    dest="proxy",
    type="str",
    default="",
    help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.",
)

# --retries: per-connection retry budget.
retries: Callable[..., Option] = partial(
    Option,
    "--retries",
    dest="retries",
    type="int",
    default=5,
    help="Maximum number of retries each connection should attempt "
    "(default %default times).",
)

# --timeout: socket timeout in seconds.
timeout: Callable[..., Option] = partial(
    Option,
    "--timeout",
    "--default-timeout",
    metavar="sec",
    dest="timeout",
    type="float",
    default=15,
    help="Set the socket timeout (default %default seconds).",
)
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
def exists_action() -> Option:
    """Return the repeatable --exists-action option: what to do when a
    checkout path already exists."""
    kwargs = dict(
        dest="exists_action",
        type="choice",
        choices=["s", "i", "w", "b", "a"],
        default=[],
        action="append",
        metavar="action",
        help="Default action when a path already exists: "
        "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
    )
    return Option("--exists-action", **kwargs)
|
| 312 |
+
|
| 313 |
+
|
| 314 |
+
# --cert: CA bundle overriding the default trust store ("path" type expands ~).
cert: Callable[..., Option] = partial(
    PipOption,
    "--cert",
    dest="cert",
    type="path",
    metavar="path",
    help=(
        "Path to PEM-encoded CA certificate bundle. "
        "If provided, overrides the default. "
        "See 'SSL Certificate Verification' in pip documentation "
        "for more information."
    ),
)

# --client-cert: client-side TLS certificate, key + cert in one PEM file.
client_cert: Callable[..., Option] = partial(
    PipOption,
    "--client-cert",
    dest="client_cert",
    type="path",
    default=None,
    metavar="path",
    help="Path to SSL client certificate, a single file containing the "
    "private key and the certificate in PEM format.",
)

# -i/--index-url: base URL of the package index (PEP 503 simple API).
index_url: Callable[..., Option] = partial(
    Option,
    "-i",
    "--index-url",
    "--pypi-url",
    dest="index_url",
    metavar="URL",
    default=PyPI.simple_url,
    help="Base URL of the Python Package Index (default %default). "
    "This should point to a repository compliant with PEP 503 "
    "(the simple repository API) or a local directory laid out "
    "in the same format.",
)
|
| 352 |
+
|
| 353 |
+
|
| 354 |
+
def extra_index_url() -> Option:
    """Return the repeatable --extra-index-url option: additional package
    indexes consulted besides --index-url."""
    help_text = (
        "Extra URLs of package indexes to use in addition to "
        "--index-url. Should follow the same rules as "
        "--index-url."
    )
    return Option(
        "--extra-index-url",
        metavar="URL",
        dest="extra_index_urls",
        default=[],
        action="append",
        help=help_text,
    )
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
# --no-index: skip the package index entirely; rely only on --find-links.
no_index: Callable[..., Option] = partial(
    Option,
    "--no-index",
    dest="no_index",
    action="store_true",
    default=False,
    help="Ignore package index (only looking at --find-links URLs instead).",
)
|
| 375 |
+
|
| 376 |
+
|
| 377 |
+
def find_links() -> Option:
    """Return the repeatable -f/--find-links option: extra locations
    (URLs, html files, or local directories) to search for archives."""
    help_text = (
        "If a URL or path to an html file, then parse for links to "
        "archives such as sdist (.tar.gz) or wheel (.whl) files. "
        "If a local path or file:// URL that's a directory, "
        "then look for archives in the directory listing. "
        "Links to VCS project URLs are not supported."
    )
    return Option(
        "-f",
        "--find-links",
        metavar="url",
        dest="find_links",
        default=[],
        action="append",
        help=help_text,
    )
|
| 391 |
+
|
| 392 |
+
|
| 393 |
+
def trusted_host() -> Option:
    """Return the repeatable --trusted-host option: hosts exempt from
    HTTPS validity checks."""
    return Option(
        "--trusted-host",
        dest="trusted_hosts",
        action="append",
        metavar="HOSTNAME",
        default=[],
        help="Mark this host or host:port pair as trusted, even though it "
        "does not have valid or any HTTPS.",
    )


def constraints() -> Option:
    """Return the repeatable -c/--constraint option: constraints files."""
    return Option(
        "-c",
        "--constraint",
        dest="constraints",
        action="append",
        default=[],
        metavar="file",
        help="Constrain versions using the given constraints file. "
        "This option can be used multiple times.",
    )


def requirements() -> Option:
    """Return the repeatable -r/--requirement option: requirements files."""
    return Option(
        "-r",
        "--requirement",
        dest="requirements",
        action="append",
        default=[],
        metavar="file",
        help="Install from the given requirements file. "
        "This option can be used multiple times.",
    )


def editable() -> Option:
    """Return the repeatable -e/--editable option: editable installs from
    a local path or VCS url."""
    return Option(
        "-e",
        "--editable",
        dest="editables",
        action="append",
        default=[],
        metavar="path/url",
        help=(
            "Install a project in editable mode (i.e. setuptools "
            '"develop mode") from a local project path or a VCS url.'
        ),
    )
|
| 444 |
+
|
| 445 |
+
|
| 446 |
+
def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None:
|
| 447 |
+
value = os.path.abspath(value)
|
| 448 |
+
setattr(parser.values, option.dest, value)
|
| 449 |
+
|
| 450 |
+
|
| 451 |
+
# --src: where editable (VCS) projects get checked out; the _handle_src
# callback makes the supplied path absolute before storing it.
src: Callable[..., Option] = partial(
    PipOption,
    "--src",
    "--source",
    "--source-dir",
    "--source-directory",
    dest="src_dir",
    type="path",
    metavar="dir",
    default=get_src_prefix(),
    action="callback",
    callback=_handle_src,
    help="Directory to check out editable projects into. "
    'The default in a virtualenv is "<venv path>/src". '
    'The default for global installs is "<current dir>/src".',
)
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
def _get_format_control(values: Values, option: Option) -> Any:
|
| 470 |
+
"""Get a format_control object."""
|
| 471 |
+
return getattr(values, option.dest)
|
| 472 |
+
|
| 473 |
+
|
| 474 |
+
def _handle_no_binary(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    """Callback for --no-binary: fold the value into the shared FormatControl,
    removing conflicting entries from the only_binary set."""
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value,
        existing.no_binary,
        existing.only_binary,
    )


def _handle_only_binary(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    """Callback for --only-binary: fold the value into the shared FormatControl,
    removing conflicting entries from the no_binary set."""
    existing = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(
        value,
        existing.only_binary,
        existing.no_binary,
    )


def no_binary() -> Option:
    """Return the repeatable --no-binary option (disallow wheels for the
    given package names, or for all / none)."""
    # A fresh FormatControl per call: options carrying mutable state must
    # not be shared between parses (see module docstring).
    format_control = FormatControl(set(), set())
    return Option(
        "--no-binary",
        dest="format_control",
        action="callback",
        callback=_handle_no_binary,
        type="str",
        default=format_control,
        help="Do not use binary packages. Can be supplied multiple times, and "
        'each time adds to the existing value. Accepts either ":all:" to '
        'disable all binary packages, ":none:" to empty the set (notice '
        "the colons), or one or more package names with commas between "
        "them (no colons). Note that some packages are tricky to compile "
        "and may fail to install when this option is used on them.",
    )


def only_binary() -> Option:
    """Return the repeatable --only-binary option (disallow sdists for the
    given package names, or for all / none)."""
    # Fresh FormatControl per call, for the same statefulness reason as
    # no_binary().
    format_control = FormatControl(set(), set())
    return Option(
        "--only-binary",
        dest="format_control",
        action="callback",
        callback=_handle_only_binary,
        type="str",
        default=format_control,
        help="Do not use source packages. Can be supplied multiple times, and "
        'each time adds to the existing value. Accepts either ":all:" to '
        'disable all source packages, ":none:" to empty the set, or one '
        "or more package names with commas between them. Packages "
        "without binary distributions will fail to install when this "
        "option is used on them.",
    )
|
| 530 |
+
|
| 531 |
+
|
| 532 |
+
# --platform: restrict wheel selection to the given platform tag(s);
# repeatable to target several platforms at once.
platforms: Callable[..., Option] = partial(
    Option,
    "--platform",
    dest="platforms",
    metavar="platform",
    action="append",
    default=None,
    help=(
        "Only use wheels compatible with <platform>. Defaults to the "
        "platform of the running system. Use this option multiple times to "
        "specify multiple platforms supported by the target interpreter."
    ),
)
|
| 545 |
+
|
| 546 |
+
|
| 547 |
+
# This was made a separate function for unit-testing purposes.
|
| 548 |
+
def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
|
| 549 |
+
"""
|
| 550 |
+
Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
|
| 551 |
+
|
| 552 |
+
:return: A 2-tuple (version_info, error_msg), where `error_msg` is
|
| 553 |
+
non-None if and only if there was a parsing error.
|
| 554 |
+
"""
|
| 555 |
+
if not value:
|
| 556 |
+
# The empty string is the same as not providing a value.
|
| 557 |
+
return (None, None)
|
| 558 |
+
|
| 559 |
+
parts = value.split(".")
|
| 560 |
+
if len(parts) > 3:
|
| 561 |
+
return ((), "at most three version parts are allowed")
|
| 562 |
+
|
| 563 |
+
if len(parts) == 1:
|
| 564 |
+
# Then we are in the case of "3" or "37".
|
| 565 |
+
value = parts[0]
|
| 566 |
+
if len(value) > 1:
|
| 567 |
+
parts = [value[0], value[1:]]
|
| 568 |
+
|
| 569 |
+
try:
|
| 570 |
+
version_info = tuple(int(part) for part in parts)
|
| 571 |
+
except ValueError:
|
| 572 |
+
return ((), "each version part must be an integer")
|
| 573 |
+
|
| 574 |
+
return (version_info, None)
|
| 575 |
+
|
| 576 |
+
|
| 577 |
+
def _handle_python_version(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    """
    Handle a provided --python-version value.

    Parses the value with _convert_python_version() and stores the
    resulting version tuple; aborts option parsing on a malformed value.
    """
    version_info, error_msg = _convert_python_version(value)
    if error_msg is not None:
        msg = f"invalid --python-version value: {value!r}: {error_msg}"
        raise_option_error(parser, option=option, msg=msg)

    parser.values.python_version = version_info
|
| 589 |
+
|
| 590 |
+
|
| 591 |
+
# --python-version: parsed by _handle_python_version into a tuple of ints.
python_version: Callable[..., Option] = partial(
    Option,
    "--python-version",
    dest="python_version",
    metavar="python_version",
    action="callback",
    callback=_handle_python_version,
    type="str",
    default=None,
    help=dedent(
        """\
    The Python interpreter version to use for wheel and "Requires-Python"
    compatibility checks. Defaults to a version derived from the running
    interpreter. The version can be specified using up to three dot-separated
    integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
    version can also be given as a string without dots (e.g. "37" for 3.7.0).
    """
    ),
)


# --implementation: restrict wheels to a given interpreter implementation tag.
implementation: Callable[..., Option] = partial(
    Option,
    "--implementation",
    dest="implementation",
    metavar="implementation",
    default=None,
    help=(
        "Only use wheels compatible with Python "
        "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
        " or 'ip'. If not specified, then the current "
        "interpreter implementation is used. Use 'py' to force "
        "implementation-agnostic wheels."
    ),
)


# --abi: restrict wheels to the given ABI tag(s); repeatable.
abis: Callable[..., Option] = partial(
    Option,
    "--abi",
    dest="abis",
    metavar="abi",
    action="append",
    default=None,
    help=(
        "Only use wheels compatible with Python abi <abi>, e.g. 'pypy_41'. "
        "If not specified, then the current interpreter abi tag is used. "
        "Use this option multiple times to specify multiple abis supported "
        "by the target interpreter. Generally you will need to specify "
        "--implementation, --platform, and --python-version when using this "
        "option."
    ),
)
|
| 644 |
+
|
| 645 |
+
|
| 646 |
+
def add_target_python_options(cmd_opts: OptionGroup) -> None:
    """Add the four target-interpreter options (--platform, --python-version,
    --implementation, --abi) to the given option group."""
    cmd_opts.add_option(platforms())
    cmd_opts.add_option(python_version())
    cmd_opts.add_option(implementation())
    cmd_opts.add_option(abis())


def make_target_python(options: Values) -> TargetPython:
    """Build a TargetPython from the parsed target-interpreter options
    (the ones registered by add_target_python_options)."""
    target_python = TargetPython(
        platforms=options.platforms,
        py_version_info=options.python_version,
        abis=options.abis,
        implementation=options.implementation,
    )

    return target_python
|
| 662 |
+
|
| 663 |
+
|
| 664 |
+
def prefer_binary() -> Option:
    """Return the --prefer-binary flag: favour wheels even over newer sdists."""
    help_text = (
        "Prefer binary packages over source packages, even if the "
        "source packages are newer."
    )
    return Option(
        "--prefer-binary",
        default=False,
        dest="prefer_binary",
        action="store_true",
        help=help_text,
    )
|
| 675 |
+
|
| 676 |
+
|
| 677 |
+
# --cache-dir: where pip keeps its HTTP and wheel caches ("path" expands ~).
cache_dir: Callable[..., Option] = partial(
    PipOption,
    "--cache-dir",
    dest="cache_dir",
    default=USER_CACHE_DIR,
    metavar="dir",
    type="path",
    help="Store the cache data in <dir>.",
)


def _handle_no_cache_dir(
    option: Option, opt: str, value: str, parser: OptionParser
) -> None:
    """
    Process a value provided for the --no-cache-dir option.

    This is an optparse.Option callback for the --no-cache-dir option.
    """
    # The value argument will be None if --no-cache-dir is passed via the
    # command-line, since the option doesn't accept arguments. However,
    # the value can be non-None if the option is triggered e.g. by an
    # environment variable, like PIP_NO_CACHE_DIR=true.
    if value is not None:
        # Then parse the string value to get argument error-checking.
        try:
            strtobool(value)
        except ValueError as exc:
            raise_option_error(parser, option=option, msg=str(exc))

    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
    # rather than enabled (logic would say the latter). Thus, we disable
    # the cache directory not just on values that parse to True, but (for
    # backwards compatibility reasons) also on values that parse to False.
    # In other words, always set it to False if the option is provided in
    # some (valid) form.
    parser.values.cache_dir = False
|
| 715 |
+
|
| 716 |
+
|
| 717 |
+
# --no-cache-dir: disable caching entirely (callback validates env-provided
# values; see _handle_no_cache_dir for the backwards-compat quirk).
no_cache: Callable[..., Option] = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    action="callback",
    callback=_handle_no_cache_dir,
    help="Disable the cache.",
)

# --no-deps: skip installing package dependencies.
no_deps: Callable[..., Option] = partial(
    Option,
    "--no-deps",
    "--no-dependencies",
    dest="ignore_dependencies",
    action="store_true",
    default=False,
    help="Don't install package dependencies.",
)

# --ignore-requires-python: skip Requires-Python metadata checks.
ignore_requires_python: Callable[..., Option] = partial(
    Option,
    "--ignore-requires-python",
    dest="ignore_requires_python",
    action="store_true",
    help="Ignore the Requires-Python information.",
)

# --no-build-isolation: build in the current environment; PEP 518 build
# dependencies must then already be installed.
no_build_isolation: Callable[..., Option] = partial(
    Option,
    "--no-build-isolation",
    dest="build_isolation",
    action="store_false",
    default=True,
    help="Disable isolation when building a modern source distribution. "
    "Build dependencies specified by PEP 518 must be already installed "
    "if this option is used.",
)

# --check-build-dependencies: validate declared build requirements when
# building via PEP 517.
check_build_deps: Callable[..., Option] = partial(
    Option,
    "--check-build-dependencies",
    dest="check_build_deps",
    action="store_true",
    default=False,
    help="Check the build dependencies when PEP517 is used.",
)
|
| 763 |
+
|
| 764 |
+
|
| 765 |
+
def _handle_no_use_pep517(
    option: Option, opt: str, value: str, parser: OptionParser
) -> None:
    """
    Process a value provided for the --no-use-pep517 option.

    This is an optparse.Option callback for the no_use_pep517 option.

    Aborts parsing (via raise_option_error) if the option was given a
    value, or if setuptools/wheel are unavailable for a legacy build;
    otherwise records use_pep517 = False.
    """
    # Since --no-use-pep517 doesn't accept arguments, the value argument
    # will be None if --no-use-pep517 is passed via the command-line.
    # However, the value can be non-None if the option is triggered e.g.
    # by an environment variable, for example "PIP_NO_USE_PEP517=true".
    if value is not None:
        msg = """A value was passed for --no-use-pep517,
        probably using either the PIP_NO_USE_PEP517 environment variable
        or the "no-use-pep517" config file option. Use an appropriate value
        of the PIP_USE_PEP517 environment variable or the "use-pep517"
        config file option instead.
        """
        raise_option_error(parser, option=option, msg=msg)

    # If user doesn't wish to use pep517, we check if setuptools and wheel are
    # installed and raise error if it is not.
    packages = ("setuptools", "wheel")
    if not all(importlib.util.find_spec(package) for package in packages):
        # Note: the first fragment previously carried a pointless f-prefix
        # (it contains no placeholders); only the second needs formatting.
        msg = (
            "It is not possible to use --no-use-pep517 "
            f"without {' and '.join(packages)} installed."
        )
        raise_option_error(parser, option=option, msg=msg)

    # Otherwise, --no-use-pep517 was passed via the command-line.
    parser.values.use_pep517 = False
|
| 798 |
+
|
| 799 |
+
|
| 800 |
+
# --use-pep517: force PEP 517 build behaviour (default None = auto-detect).
use_pep517: Callable[..., Option] = partial(
    Option,
    "--use-pep517",
    dest="use_pep517",
    action="store_true",
    default=None,
    help="Use PEP 517 for building source distributions "
    "(use --no-use-pep517 to force legacy behaviour).",
)

# --no-use-pep517: hidden escape hatch forcing legacy setup.py builds;
# validated by the _handle_no_use_pep517 callback.
no_use_pep517: Callable[..., Option] = partial(
    Option,
    "--no-use-pep517",
    dest="use_pep517",
    action="callback",
    callback=_handle_no_use_pep517,
    default=None,
    help=SUPPRESS_HELP,
)
|
| 819 |
+
|
| 820 |
+
|
| 821 |
+
def _handle_config_settings(
|
| 822 |
+
option: Option, opt_str: str, value: str, parser: OptionParser
|
| 823 |
+
) -> None:
|
| 824 |
+
key, sep, val = value.partition("=")
|
| 825 |
+
if sep != "=":
|
| 826 |
+
parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")
|
| 827 |
+
dest = getattr(parser.values, option.dest)
|
| 828 |
+
if dest is None:
|
| 829 |
+
dest = {}
|
| 830 |
+
setattr(parser.values, option.dest, dest)
|
| 831 |
+
if key in dest:
|
| 832 |
+
if isinstance(dest[key], list):
|
| 833 |
+
dest[key].append(val)
|
| 834 |
+
else:
|
| 835 |
+
dest[key] = [dest[key], val]
|
| 836 |
+
else:
|
| 837 |
+
dest[key] = val
|
| 838 |
+
|
| 839 |
+
|
| 840 |
+
# -C/--config-settings: repeatable KEY=VALUE settings handed to the PEP 517
# build backend (merged by _handle_config_settings).
config_settings: Callable[..., Option] = partial(
    Option,
    "-C",
    "--config-settings",
    dest="config_settings",
    type=str,
    action="callback",
    callback=_handle_config_settings,
    metavar="settings",
    help="Configuration settings to be passed to the PEP 517 build backend. "
    "Settings take the form KEY=VALUE. Use multiple --config-settings options "
    "to pass multiple keys to the backend.",
)

# --build-option: extra args forwarded to 'setup.py bdist_wheel' (legacy builds).
build_options: Callable[..., Option] = partial(
    Option,
    "--build-option",
    dest="build_options",
    metavar="options",
    action="append",
    help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
)

# --global-option: extra args placed before the setup.py command (legacy builds).
global_options: Callable[..., Option] = partial(
    Option,
    "--global-option",
    dest="global_options",
    action="append",
    metavar="options",
    help="Extra global options to be supplied to the setup.py "
    "call before the install or bdist_wheel command.",
)

# --no-clean: keep temporary build directories around for inspection.
no_clean: Callable[..., Option] = partial(
    Option,
    "--no-clean",
    action="store_true",
    default=False,
    help="Don't clean up build directories.",
)

# --pre: also consider pre-release and development versions.
pre: Callable[..., Option] = partial(
    Option,
    "--pre",
    action="store_true",
    default=False,
    help="Include pre-release and development versions. By default, "
    "pip only finds stable versions.",
)

# --disable-pip-version-check: skip the periodic "newer pip available" check.
disable_pip_version_check: Callable[..., Option] = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    default=False,
    help="Don't periodically check PyPI to determine whether a new version "
    "of pip is available for download. Implied with --no-index.",
)

# --root-user-action: how to react when pip runs as root.
root_user_action: Callable[..., Option] = partial(
    Option,
    "--root-user-action",
    dest="root_user_action",
    default="warn",
    choices=["warn", "ignore"],
    help="Action if pip is run as a root user [warn, ignore] (default: warn)",
)
|
| 908 |
+
|
| 909 |
+
|
| 910 |
+
def _handle_merge_hash(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    """optparse callback for --hash: merge an "algo:digest" value.

    Digests accumulate per algorithm in ``parser.values.hashes``, a mapping
    of hash-algorithm name to a list of hex digests. A malformed value or a
    non-strong algorithm aborts parsing via ``parser.error`` (which exits).
    """
    if not parser.values.hashes:
        parser.values.hashes = {}
    algo, sep, digest = value.partition(":")
    if not sep:
        # No ":" present at all -> cannot be "algo:digest".
        parser.error(
            f"Arguments to {opt_str} must be a hash name "
            "followed by a value, like --hash=sha256:"
            "abcde..."
        )
    if algo not in STRONG_HASHES:
        parser.error(
            f"Allowed hash algorithms for {opt_str} are {', '.join(STRONG_HASHES)}."
        )
    parser.values.hashes.setdefault(algo, []).append(digest)
| 933 |
+
|
| 934 |
+
# --hash uses a callback so repeated flags accumulate into a
# {algorithm: [digest, ...]} mapping (see _handle_merge_hash).
hash: Callable[..., Option] = partial(
    Option,
    "--hash",
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest="hashes",
    action="callback",
    callback=_handle_merge_hash,
    type="string",
    help="Verify that the package's archive matches this "
    "hash before installing. Example: --hash=sha256:abcdef...",
)


# Force every requirement to carry a --hash (repeatable installs).
require_hashes: Callable[..., Option] = partial(
    Option,
    "--require-hashes",
    dest="require_hashes",
    action="store_true",
    default=False,
    help="Require a hash to check each requirement against, for "
    "repeatable installs. This option is implied when any package in a "
    "requirements file has a --hash option.",
)


# Restrict ``pip list`` to specific installation prefixes.
list_path: Callable[..., Option] = partial(
    PipOption,
    "--path",
    dest="path",
    type="path",
    action="append",
    help="Restrict to the specified installation path for listing "
    "packages (can be used multiple times).",
)
| 970 |
+
|
| 971 |
+
def check_list_path_option(options: Values) -> None:
    """Reject combining --path with --user or --local.

    :raises CommandError: when ``--path`` is given together with either
        ``--user`` or ``--local``.
    """
    if not options.path:
        return
    if options.user or options.local:
        raise CommandError("Cannot combine '--path' with '--user' or '--local'")
|
| 975 |
+
|
| 976 |
+
# Exclude named packages from ``pip list`` output.
list_exclude: Callable[..., Option] = partial(
    PipOption,
    "--exclude",
    dest="excludes",
    action="append",
    metavar="package",
    type="package_name",
    help="Exclude specified package from the output",
)


# Silence the deprecation warning shown for soon-unsupported Pythons.
no_python_version_warning: Callable[..., Option] = partial(
    Option,
    "--no-python-version-warning",
    dest="no_python_version_warning",
    action="store_true",
    default=False,
    help="Silence deprecation warnings for upcoming unsupported Pythons.",
)


# Features that are now always on. A warning is printed if they are used.
ALWAYS_ENABLED_FEATURES = [
    "truststore",  # always on since 24.2
    "no-binary-enable-wheel-cache",  # always on since 23.1
]

# Opt in to experimental functionality. Always-enabled features remain
# accepted choices so existing command lines keep working.
use_new_feature: Callable[..., Option] = partial(
    Option,
    "--use-feature",
    dest="features_enabled",
    metavar="feature",
    action="append",
    default=[],
    choices=[
        "fast-deps",
    ]
    + ALWAYS_ENABLED_FEATURES,
    help="Enable new functionality, that may be backward incompatible.",
)

# Opt back in to deprecated behavior during its removal window.
use_deprecated_feature: Callable[..., Option] = partial(
    Option,
    "--use-deprecated",
    dest="deprecated_features_enabled",
    metavar="feature",
    action="append",
    default=[],
    choices=[
        "legacy-resolver",
        "legacy-certs",
    ],
    help=("Enable deprecated functionality, that will be removed in the future."),
)


##########
# groups #
##########

# Option-group templates consumed elsewhere in this package; each maps a
# display name to the ordered list of option factories it contains.
general_group: Dict[str, Any] = {
    "name": "General Options",
    "options": [
        help_,
        debug_mode,
        isolated_mode,
        require_virtualenv,
        python,
        verbose,
        version,
        quiet,
        log,
        no_input,
        keyring_provider,
        proxy,
        retries,
        timeout,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
        no_color,
        no_python_version_warning,
        use_new_feature,
        use_deprecated_feature,
    ],
}

index_group: Dict[str, Any] = {
    "name": "Package Index Options",
    "options": [
        index_url,
        extra_index_url,
        no_index,
        find_links,
    ],
}
llava/lib/python3.10/site-packages/pip/_internal/cli/command_context.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from contextlib import ExitStack, contextmanager
|
| 2 |
+
from typing import ContextManager, Generator, TypeVar
|
| 3 |
+
|
| 4 |
+
_T = TypeVar("_T", covariant=True)
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class CommandContextMixIn:
    """Give a command one shared, explicitly-scoped "main" context.

    Resources entered through :meth:`enter_context` are tied to the
    lifetime of :meth:`main_context` and released when it exits.
    ``enter_context`` is only legal while ``main_context`` is active.
    """

    def __init__(self) -> None:
        super().__init__()
        self._in_main_context = False
        self._main_context = ExitStack()

    @contextmanager
    def main_context(self) -> Generator[None, None, None]:
        # Guard against nested or re-entrant activation.
        assert not self._in_main_context
        self._in_main_context = True
        try:
            with self._main_context:
                yield
        finally:
            self._in_main_context = False

    def enter_context(self, context_provider: ContextManager[_T]) -> _T:
        # Outside main_context() the resource would never be released.
        assert self._in_main_context
        return self._main_context.enter_context(context_provider)
|
llava/lib/python3.10/site-packages/pip/_internal/cli/index_command.py
ADDED
|
@@ -0,0 +1,171 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Contains command classes which may interact with an index / the network.
|
| 3 |
+
|
| 4 |
+
Unlike its sister module, req_command, this module still uses lazy imports
|
| 5 |
+
so commands which don't always hit the network (e.g. list w/o --outdated or
|
| 6 |
+
--uptodate) don't need waste time importing PipSession and friends.
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
import logging
|
| 10 |
+
import os
|
| 11 |
+
import sys
|
| 12 |
+
from optparse import Values
|
| 13 |
+
from typing import TYPE_CHECKING, List, Optional
|
| 14 |
+
|
| 15 |
+
from pip._vendor import certifi
|
| 16 |
+
|
| 17 |
+
from pip._internal.cli.base_command import Command
|
| 18 |
+
from pip._internal.cli.command_context import CommandContextMixIn
|
| 19 |
+
|
| 20 |
+
if TYPE_CHECKING:
|
| 21 |
+
from ssl import SSLContext
|
| 22 |
+
|
| 23 |
+
from pip._internal.network.session import PipSession
|
| 24 |
+
|
| 25 |
+
logger = logging.getLogger(__name__)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def _create_truststore_ssl_context() -> Optional["SSLContext"]:
    """Build an SSLContext backed by the OS trust store via ``truststore``.

    Returns None (after logging why) when truststore cannot be used:
    Python < 3.10, no ssl module, or the vendored truststore is missing.
    """
    if sys.version_info < (3, 10):
        logger.debug("Disabling truststore because Python version isn't 3.10+")
        return None

    try:
        import ssl
    except ImportError:
        logger.warning("Disabling truststore since ssl support is missing")
        return None

    try:
        from pip._vendor import truststore
    except ImportError:
        logger.warning("Disabling truststore because platform isn't supported")
        return None

    # Seed the context with certifi's CA bundle in addition to the system
    # trust store.
    context = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    context.load_verify_locations(certifi.where())
    return context
|
| 49 |
+
|
| 50 |
+
class SessionCommandMixin(CommandContextMixIn):
    """
    A class mixin for command classes needing _build_session().
    """

    def __init__(self) -> None:
        super().__init__()
        # Cached session, created lazily by get_default_session().
        self._session: Optional[PipSession] = None

    @classmethod
    def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
        """Return a list of index urls from user-provided options."""
        index_urls = []
        # --no-index suppresses both the primary and the extra index URLs.
        if not getattr(options, "no_index", False):
            url = getattr(options, "index_url", None)
            if url:
                index_urls.append(url)
            urls = getattr(options, "extra_index_urls", None)
            if urls:
                index_urls.extend(urls)
        # Return None rather than an empty list
        return index_urls or None

    def get_default_session(self, options: Values) -> "PipSession":
        """Get a default-managed session."""
        if self._session is None:
            # Tie the session's lifetime to the command's main context so it
            # is closed automatically when the command finishes.
            self._session = self.enter_context(self._build_session(options))
            # there's no type annotation on requests.Session, so it's
            # automatically ContextManager[Any] and self._session becomes Any,
            # then https://github.com/python/mypy/issues/7696 kicks in
            assert self._session is not None
        return self._session

    def _build_session(
        self,
        options: Values,
        retries: Optional[int] = None,
        timeout: Optional[int] = None,
    ) -> "PipSession":
        """Create a PipSession configured from *options*.

        *retries* and *timeout*, when given, override the corresponding
        values in *options*.
        """
        # Imported lazily so commands that never hit the network don't pay
        # for it (see the module docstring).
        from pip._internal.network.session import PipSession

        cache_dir = options.cache_dir
        assert not cache_dir or os.path.isabs(cache_dir)

        # System trust store is the default; the deprecated "legacy-certs"
        # feature opts back out of it.
        if "legacy-certs" not in options.deprecated_features_enabled:
            ssl_context = _create_truststore_ssl_context()
        else:
            ssl_context = None

        session = PipSession(
            cache=os.path.join(cache_dir, "http-v2") if cache_dir else None,
            retries=retries if retries is not None else options.retries,
            trusted_hosts=options.trusted_hosts,
            index_urls=self._get_index_urls(options),
            ssl_context=ssl_context,
        )

        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert

        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert

        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = timeout if timeout is not None else options.timeout

        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }
            # An explicit --proxy wins over any proxy settings from the
            # environment.
            session.trust_env = False
            session.pip_proxy = options.proxy

        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input
        session.auth.keyring_provider = options.keyring_provider

        return session
|
| 134 |
+
|
| 135 |
+
def _pip_self_version_check(session: "PipSession", options: Values) -> None:
    """Run pip's self-version check, importing it only when needed.

    The lazy import keeps self_outdated_check (and its dependencies) off
    the import path of commands that never perform the check.
    """
    from pip._internal.self_outdated_check import pip_self_version_check

    pip_self_version_check(session, options)
|
| 140 |
+
|
| 141 |
+
class IndexGroupCommand(Command, SessionCommandMixin):
    """
    Abstract base class for commands with the index_group options.

    This also corresponds to the commands that permit the pip version check.
    """

    def handle_pip_version_check(self, options: Values) -> None:
        """
        Do the pip version check if not disabled.

        This overrides the default behavior of not doing the check.
        """
        # Make sure the index_group options are present.
        assert hasattr(options, "no_index")

        if options.disable_pip_version_check or options.no_index:
            return

        try:
            # Otherwise, check if we're using the latest version of pip available.
            # Use no retries and a capped timeout so a slow or unreachable
            # index cannot stall the actual command.
            session = self._build_session(
                options,
                retries=0,
                timeout=min(5, options.timeout),
            )
            with session:
                _pip_self_version_check(session, options)
        except Exception:
            # Best-effort: the version check must never break the command.
            logger.warning("There was an error checking the latest version of pip.")
            logger.debug("See below for error", exc_info=True)
llava/lib/python3.10/site-packages/pip/_internal/cli/main.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Primary application entrypoint.
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import locale
|
| 5 |
+
import logging
|
| 6 |
+
import os
|
| 7 |
+
import sys
|
| 8 |
+
import warnings
|
| 9 |
+
from typing import List, Optional
|
| 10 |
+
|
| 11 |
+
from pip._internal.cli.autocompletion import autocomplete
|
| 12 |
+
from pip._internal.cli.main_parser import parse_command
|
| 13 |
+
from pip._internal.commands import create_command
|
| 14 |
+
from pip._internal.exceptions import PipError
|
| 15 |
+
from pip._internal.utils import deprecation
|
| 16 |
+
|
| 17 |
+
logger = logging.getLogger(__name__)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
# Do not import and use main() directly! Using it directly is actively
|
| 21 |
+
# discouraged by pip's maintainers. The name, location and behavior of
|
| 22 |
+
# this function is subject to change, so calling it directly is not
|
| 23 |
+
# portable across different pip versions.
|
| 24 |
+
|
| 25 |
+
# In addition, running pip in-process is unsupported and unsafe. This is
|
| 26 |
+
# elaborated in detail at
|
| 27 |
+
# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
|
| 28 |
+
# That document also provides suggestions that should work for nearly
|
| 29 |
+
# all users that are considering importing and using main() directly.
|
| 30 |
+
|
| 31 |
+
# However, we know that certain users will still want to invoke pip
|
| 32 |
+
# in-process. If you understand and accept the implications of using pip
|
| 33 |
+
# in an unsupported manner, the best approach is to use runpy to avoid
|
| 34 |
+
# depending on the exact location of this entry point.
|
| 35 |
+
|
| 36 |
+
# The following example shows how to use runpy to invoke pip in that
|
| 37 |
+
# case:
|
| 38 |
+
#
|
| 39 |
+
# sys.argv = ["pip", your, args, here]
|
| 40 |
+
# runpy.run_module("pip", run_name="__main__")
|
| 41 |
+
#
|
| 42 |
+
# Note that this will exit the process after running, unlike a direct
|
| 43 |
+
# call to main. As it is not safe to do any processing after calling
|
| 44 |
+
# main, this should not be an issue in practice.
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def main(args: Optional[List[str]] = None) -> int:
    """Parse the command line and dispatch to the chosen pip subcommand.

    :param args: argument list to parse; defaults to ``sys.argv[1:]``.
    :return: the subcommand's exit status.

    See the module-level comment above for why invoking this in-process
    is unsupported.
    """
    if args is None:
        args = sys.argv[1:]

    # Suppress the pkg_resources deprecation warning
    # Note - we use a module of .*pkg_resources to cover
    # the normal case (pip._vendor.pkg_resources) and the
    # devendored case (a bare pkg_resources)
    warnings.filterwarnings(
        action="ignore", category=DeprecationWarning, module=".*pkg_resources"
    )

    # Configure our deprecation warnings to be sent through loggers
    deprecation.install_warning_logger()

    # Shell-completion hook; presumably exits early when completion
    # environment variables are set (see the autocompletion module).
    autocomplete()

    try:
        cmd_name, cmd_args = parse_command(args)
    except PipError as exc:
        sys.stderr.write(f"ERROR: {exc}")
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, "")
    except locale.Error as e:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)
    command = create_command(cmd_name, isolated=("--isolated" in cmd_args))

    return command.main(cmd_args)
|
llava/lib/python3.10/site-packages/pip/_internal/cli/main_parser.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""A single place for constructing and exposing the main parser
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
import os
|
| 5 |
+
import subprocess
|
| 6 |
+
import sys
|
| 7 |
+
from typing import List, Optional, Tuple
|
| 8 |
+
|
| 9 |
+
from pip._internal.build_env import get_runnable_pip
|
| 10 |
+
from pip._internal.cli import cmdoptions
|
| 11 |
+
from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
|
| 12 |
+
from pip._internal.commands import commands_dict, get_similar_commands
|
| 13 |
+
from pip._internal.exceptions import CommandError
|
| 14 |
+
from pip._internal.utils.misc import get_pip_version, get_prog
|
| 15 |
+
|
| 16 |
+
__all__ = ["create_main_parser", "parse_command"]
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def create_main_parser() -> ConfigOptionParser:
    """Build the top-level option parser used for ``pip <command>``.

    The parser carries only the general options; everything after the
    subcommand name is left for the subcommand itself to parse.
    """
    parser = ConfigOptionParser(
        usage="\n%prog <command> [options]",
        add_help_option=False,
        formatter=UpdatingDefaultsHelpFormatter(),
        name="global",
        prog=get_prog(),
    )
    # Stop at the first positional so subcommand options pass through intact.
    parser.disable_interspersed_args()

    parser.version = get_pip_version()

    # General options apply regardless of the subcommand.
    general_options = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(general_options)

    # Flag consulted by the help formatter to label the command listing.
    parser.main = True  # type: ignore

    # One "<name>  <summary>" row per registered command, preceded by a
    # blank line.
    rows = [
        f"{name:27} {command_info.summary}"
        for name, command_info in commands_dict.items()
    ]
    parser.description = "\n".join([""] + rows)

    return parser
|
| 49 |
+
|
| 50 |
+
def identify_python_interpreter(python: str) -> Optional[str]:
    """Resolve *python* to an interpreter path, or None if nothing suitable.

    A path to an existing file is returned as-is. A directory is treated as
    a virtual environment and searched for its interpreter. Anything else
    yields None.
    """
    if not os.path.exists(python):
        # Could not find the interpreter specified
        return None
    if not os.path.isdir(python):
        return python

    # bin/python for Unix, Scripts/python.exe for Windows.
    # Probe both in case of odd setups like cygwin.
    for relative in ("bin/python", "Scripts/python.exe"):
        candidate = os.path.join(python, relative)
        if os.path.exists(candidate):
            return candidate
    return None
|
| 68 |
+
|
| 69 |
+
def parse_command(args: List[str]) -> Tuple[str, List[str]]:
    """Split *args* into a subcommand name and its remaining arguments.

    The global-only flows (--python re-invocation, --version, bare
    ``pip``/``pip help``) are handled here and exit the process instead of
    returning.

    :raises CommandError: if the subcommand is unknown, or --python cannot
        be resolved to an interpreter, or the re-invoked pip fails to start.
    """
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout=5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --python
    if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
        # Re-invoke pip using the specified Python interpreter
        interpreter = identify_python_interpreter(general_options.python)
        if interpreter is None:
            raise CommandError(
                f"Could not locate Python interpreter {general_options.python}"
            )

        pip_cmd = [
            interpreter,
            get_runnable_pip(),
        ]
        # Forward the original argv unchanged; the child re-parses it.
        pip_cmd.extend(args)

        # Set a flag so the child doesn't re-invoke itself, causing
        # an infinite loop.
        os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
        returncode = 0
        try:
            proc = subprocess.run(pip_cmd)
            returncode = proc.returncode
        except (subprocess.SubprocessError, OSError) as exc:
            raise CommandError(f"Failed to run pip under {interpreter}: {exc}")
        sys.exit(returncode)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == "help" and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = [f'unknown command "{cmd_name}"']
        if guess:
            msg.append(f'maybe you meant "{guess}"')

        raise CommandError(" - ".join(msg))

    # all the args without the subcommand
    # (list.remove drops only the first occurrence; everything else keeps
    # its original order)
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
|
llava/lib/python3.10/site-packages/pip/_internal/cli/parser.py
ADDED
|
@@ -0,0 +1,294 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Base option parser setup"""
|
| 2 |
+
|
| 3 |
+
import logging
|
| 4 |
+
import optparse
|
| 5 |
+
import shutil
|
| 6 |
+
import sys
|
| 7 |
+
import textwrap
|
| 8 |
+
from contextlib import suppress
|
| 9 |
+
from typing import Any, Dict, Generator, List, NoReturn, Optional, Tuple
|
| 10 |
+
|
| 11 |
+
from pip._internal.cli.status_codes import UNKNOWN_ERROR
|
| 12 |
+
from pip._internal.configuration import Configuration, ConfigurationError
|
| 13 |
+
from pip._internal.utils.misc import redact_auth_from_url, strtobool
|
| 14 |
+
|
| 15 |
+
logger = logging.getLogger(__name__)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Help position must be aligned with __init__.parseopts.description.
        # These settings deliberately override anything the caller passed.
        kwargs.update(
            max_help_position=30,
            indent_increment=1,
            width=shutil.get_terminal_size()[0] - 2,
        )
        super().__init__(*args, **kwargs)

    def format_option_strings(self, option: optparse.Option) -> str:
        return self._format_option_strings(option)

    def _format_option_strings(
        self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
    ) -> str:
        """Render "short, long <metavar>" for *option*.

        :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string
        :param optsep: separator
        """
        pieces = []
        if option._short_opts:
            pieces.append(option._short_opts[0])
        if option._long_opts:
            pieces.append(option._long_opts[0])
        if len(pieces) > 1:
            pieces.insert(1, optsep)

        if option.takes_value():
            assert option.dest is not None
            metavar = option.metavar or option.dest.lower()
            pieces.append(mvarfmt.format(metavar.lower()))

        return "".join(pieces)

    def format_heading(self, heading: str) -> str:
        # The implicit "Options" heading is suppressed entirely.
        return "" if heading == "Options" else heading + ":\n"

    def format_usage(self, usage: str) -> str:
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        return "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), "  "))

    def format_description(self, description: Optional[str]) -> str:
        # Leave full control over the description to us.
        if not description:
            return ""
        # The main parser's listing is labelled "Commands"; subcommands get
        # a plain "Description" heading.
        label = "Commands" if hasattr(self.parser, "main") else "Description"
        # Some doc strings have initial newlines and/or trailing whitespace.
        cleaned = description.lstrip("\n").rstrip()
        # Dedent, then reindent.
        body = self.indent_lines(textwrap.dedent(cleaned), "  ")
        return f"{label}:\n{body}\n"

    def format_epilog(self, epilog: Optional[str]) -> str:
        # Leave full control over the epilog to us.
        return epilog if epilog else ""

    def indent_lines(self, text: str, indent: str) -> str:
        return "\n".join(indent + line for line in text.split("\n"))
|
| 99 |
+
|
| 100 |
+
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    This updates the defaults before expanding them, allowing
    them to show up correctly in the help listing.

    Also redacts auth credentials from URL-type options.
    """

    def expand_default(self, option: optparse.Option) -> str:
        # Refresh defaults from config files / environment so "%default"
        # in the help text reflects the effective value.
        default_values = None
        if self.parser is not None:
            assert isinstance(self.parser, ConfigOptionParser)
            self.parser._update_defaults(self.parser.defaults)
            assert option.dest is not None
            default_values = self.parser.defaults.get(option.dest)
        help_text = super().expand_default(option)

        if default_values and option.metavar == "URL":
            if isinstance(default_values, str):
                default_values = [default_values]

            # If its not a list, we should abort and just return the help text
            if not isinstance(default_values, list):
                default_values = []

            # Strip any credentials out of URL defaults shown in --help.
            for val in default_values:
                help_text = help_text.replace(val, redact_auth_from_url(val))

        return help_text
|
| 131 |
+
|
| 132 |
+
class CustomOptionParser(optparse.OptionParser):
    """OptionParser with positional group insertion and a flattened view
    of every option it knows about."""

    def insert_option_group(
        self, idx: int, *args: Any, **kwargs: Any
    ) -> optparse.OptionGroup:
        """Insert an OptionGroup at a given position."""
        group = self.add_option_group(*args, **kwargs)
        # add_option_group appends; relocate the new group to *idx*.
        self.option_groups.insert(idx, self.option_groups.pop())
        return group

    @property
    def option_list_all(self) -> List[optparse.Option]:
        """Get a list of all options, including those in option groups."""
        flattened = self.option_list[:]
        for group in self.option_groups:
            flattened.extend(group.option_list)
        return flattened
|
| 153 |
+
|
| 154 |
+
class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(
        self,
        *args: Any,
        name: str,
        isolated: bool = False,
        **kwargs: Any,
    ) -> None:
        # `name` selects which command-specific configuration section
        # (besides "global" and ":env:") applies to this parser.
        self.name = name
        self.config = Configuration(isolated)

        assert self.name
        super().__init__(*args, **kwargs)

    def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
        """Validate a configuration-supplied default via optparse's own
        type machinery, exiting the process on an invalid value."""
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print(f"An error occurred during configuration: {exc}")
            sys.exit(3)

    def _get_ordered_configuration_items(
        self,
    ) -> Generator[Tuple[str, Any], None, None]:
        # Configuration gives keys in an unordered manner. Order them.
        override_order = ["global", self.name, ":env:"]

        # Pool the options into different groups
        section_items: Dict[str, List[Tuple[str, Any]]] = {
            name: [] for name in override_order
        }
        for section_key, val in self.config.items():
            # ignore empty values
            if not val:
                logger.debug(
                    "Ignoring configuration key '%s' as it's value is empty.",
                    section_key,
                )
                continue

            section, key = section_key.split(".", 1)
            if section in override_order:
                section_items[section].append((key, val))

        # Yield each group in their override order: later sections
        # (command-specific, then environment) override earlier ones.
        for section in override_order:
            for key, val in section_items[section]:
                yield key, val

    def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""

        # Accumulate complex default state.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in self._get_ordered_configuration_items():
            # '--' because configuration supports only long names
            option = self.get_option("--" + key)

            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            assert option.dest is not None

            if option.action in ("store_true", "store_false"):
                try:
                    val = strtobool(val)
                except ValueError:
                    self.error(
                        f"{val} is not a valid value for {key} option, "
                        "please specify a boolean value like yes/no, "
                        "true/false or 1/0 instead."
                    )
            elif option.action == "count":
                # Accept either a boolean-ish string or a non-negative int;
                # each conversion attempt is allowed to fail silently.
                with suppress(ValueError):
                    val = strtobool(val)
                with suppress(ValueError):
                    val = int(val)
                if not isinstance(val, int) or val < 0:
                    self.error(
                        f"{val} is not a valid value for {key} option, "
                        "please instead specify either a non-negative integer "
                        "or a boolean value like yes/no or false/true "
                        "which is equivalent to 1/0."
                    )
            elif option.action == "append":
                # Config values for append options are whitespace-separated.
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == "callback":
                # Callback options mutate self.values; read the final value
                # back out after the loop (hence "late_eval").
                assert option.callback is not None
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def get_default_values(self) -> optparse.Values:
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        # Load the configuration, or error out in case of an error
        try:
            self.config.load()
        except ConfigurationError as err:
            self.exit(UNKNOWN_ERROR, str(err))

        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            assert option.dest is not None
            default = defaults.get(option.dest)
            if isinstance(default, str):
                # String defaults still need conversion through the
                # option's declared type.
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg: str) -> NoReturn:
        # Print usage then exit with pip's UNKNOWN_ERROR status code.
        self.print_usage(sys.stderr)
        self.exit(UNKNOWN_ERROR, f"{msg}\n")
|
llava/lib/python3.10/site-packages/pip/_internal/cli/progress_bars.py
ADDED
|
@@ -0,0 +1,94 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import sys
|
| 3 |
+
from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
|
| 4 |
+
|
| 5 |
+
from pip._vendor.rich.progress import (
|
| 6 |
+
BarColumn,
|
| 7 |
+
DownloadColumn,
|
| 8 |
+
FileSizeColumn,
|
| 9 |
+
Progress,
|
| 10 |
+
ProgressColumn,
|
| 11 |
+
SpinnerColumn,
|
| 12 |
+
TextColumn,
|
| 13 |
+
TimeElapsedColumn,
|
| 14 |
+
TimeRemainingColumn,
|
| 15 |
+
TransferSpeedColumn,
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
from pip._internal.cli.spinners import RateLimiter
|
| 19 |
+
from pip._internal.utils.logging import get_indentation
|
| 20 |
+
|
| 21 |
+
DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def _rich_progress_bar(
    iterable: Iterable[bytes],
    *,
    bar_type: str,
    size: Optional[int],
) -> Generator[bytes, None, None]:
    """Yield chunks from *iterable* while rendering a rich progress display."""
    assert bar_type == "on", "This should only be used in the default mode."

    if size:
        # Known total: show a real bar with download counter and ETA.
        total: float = size
        columns: Tuple[ProgressColumn, ...] = (
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            DownloadColumn(),
            TransferSpeedColumn(),
            TextColumn("eta"),
            TimeRemainingColumn(),
        )
    else:
        # Unknown total: fall back to a spinner plus running counters.
        total = float("inf")
        columns = (
            TextColumn("[progress.description]{task.description}"),
            SpinnerColumn("line", speed=1.5),
            FileSizeColumn(),
            TransferSpeedColumn(),
            TimeElapsedColumn(),
        )

    progress = Progress(*columns, refresh_per_second=5)
    task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
    with progress:
        for chunk in iterable:
            yield chunk
            progress.update(task_id, advance=len(chunk))
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _raw_progress_bar(
    iterable: Iterable[bytes],
    *,
    size: Optional[int],
) -> Generator[bytes, None, None]:
    """Yield chunks while emitting plain-text progress lines to stdout."""

    def _emit(done: int, expected: int) -> None:
        sys.stdout.write(f"Progress {done} of {expected}\n")
        sys.stdout.flush()

    done = 0
    expected = size or 0
    # Throttle output to roughly one line per 0.25s, plus completion.
    throttle = RateLimiter(0.25)

    _emit(done, expected)
    for chunk in iterable:
        done += len(chunk)
        if throttle.ready() or done == expected:
            _emit(done, expected)
            throttle.reset()
        yield chunk
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def get_download_progress_renderer(
    *, bar_type: str, size: Optional[int] = None
) -> DownloadProgressRenderer:
    """Get an object that can be used to render the download progress.

    Returns a callable, that takes an iterable to "wrap".
    """
    if bar_type == "on":
        return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
    if bar_type == "raw":
        return functools.partial(_raw_progress_bar, size=size)
    return iter  # no-op, when passed an iterator
|
llava/lib/python3.10/site-packages/pip/_internal/cli/req_command.py
ADDED
|
@@ -0,0 +1,329 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Contains the RequirementCommand base class.
|
| 2 |
+
|
| 3 |
+
This class is in a separate module so the commands that do not always
|
| 4 |
+
need PackageFinder capability don't unnecessarily import the
|
| 5 |
+
PackageFinder machinery and all its vendored dependencies, etc.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import logging
|
| 9 |
+
from functools import partial
|
| 10 |
+
from optparse import Values
|
| 11 |
+
from typing import Any, List, Optional, Tuple
|
| 12 |
+
|
| 13 |
+
from pip._internal.cache import WheelCache
|
| 14 |
+
from pip._internal.cli import cmdoptions
|
| 15 |
+
from pip._internal.cli.index_command import IndexGroupCommand
|
| 16 |
+
from pip._internal.cli.index_command import SessionCommandMixin as SessionCommandMixin
|
| 17 |
+
from pip._internal.exceptions import CommandError, PreviousBuildDirError
|
| 18 |
+
from pip._internal.index.collector import LinkCollector
|
| 19 |
+
from pip._internal.index.package_finder import PackageFinder
|
| 20 |
+
from pip._internal.models.selection_prefs import SelectionPreferences
|
| 21 |
+
from pip._internal.models.target_python import TargetPython
|
| 22 |
+
from pip._internal.network.session import PipSession
|
| 23 |
+
from pip._internal.operations.build.build_tracker import BuildTracker
|
| 24 |
+
from pip._internal.operations.prepare import RequirementPreparer
|
| 25 |
+
from pip._internal.req.constructors import (
|
| 26 |
+
install_req_from_editable,
|
| 27 |
+
install_req_from_line,
|
| 28 |
+
install_req_from_parsed_requirement,
|
| 29 |
+
install_req_from_req_string,
|
| 30 |
+
)
|
| 31 |
+
from pip._internal.req.req_file import parse_requirements
|
| 32 |
+
from pip._internal.req.req_install import InstallRequirement
|
| 33 |
+
from pip._internal.resolution.base import BaseResolver
|
| 34 |
+
from pip._internal.utils.temp_dir import (
|
| 35 |
+
TempDirectory,
|
| 36 |
+
TempDirectoryTypeRegistry,
|
| 37 |
+
tempdir_kinds,
|
| 38 |
+
)
|
| 39 |
+
|
| 40 |
+
logger = logging.getLogger(__name__)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
# Temp-directory kinds that should be preserved (not deleted) when the
# user passes --no-clean or a previous-build-dir conflict is detected.
KEEPABLE_TEMPDIR_TYPES = [
    tempdir_kinds.BUILD_ENV,
    tempdir_kinds.EPHEM_WHEEL_CACHE,
    tempdir_kinds.REQ_BUILD,
]
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def with_cleanup(func: Any) -> Any:
    """Decorator for common logic related to managing temporary directories.

    Marks the keepable temp-directory kinds as not-to-delete when
    ``--no-clean`` is given, or when a PreviousBuildDirError escapes the
    wrapped command.
    """

    def _keep_tempdirs(registry: TempDirectoryTypeRegistry) -> None:
        for kind in KEEPABLE_TEMPDIR_TYPES:
            registry.set_delete(kind, False)

    def wrapper(
        self: RequirementCommand, options: Values, args: List[Any]
    ) -> Optional[int]:
        assert self.tempdir_registry is not None
        if options.no_clean:
            _keep_tempdirs(self.tempdir_registry)

        try:
            return func(self, options, args)
        except PreviousBuildDirError:
            # A pre-existing build directory was found; keep it rather
            # than risk removing user data accidentally.
            _keep_tempdirs(self.tempdir_registry)
            raise

    return wrapper
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class RequirementCommand(IndexGroupCommand):
    """Base class for commands that parse and prepare requirements.

    Adds the ``--no-clean`` option and provides helpers to build the
    package finder, requirement preparer and resolver shared by the
    install/download/wheel family of commands.
    """

    def __init__(self, *args: Any, **kw: Any) -> None:
        super().__init__(*args, **kw)

        self.cmd_opts.add_option(cmdoptions.no_clean())

    @staticmethod
    def determine_resolver_variant(options: Values) -> str:
        """Determines which resolver should be used, based on the given options."""
        if "legacy-resolver" in options.deprecated_features_enabled:
            return "legacy"

        return "resolvelib"

    @classmethod
    def make_requirement_preparer(
        cls,
        temp_build_dir: TempDirectory,
        options: Values,
        build_tracker: BuildTracker,
        session: PipSession,
        finder: PackageFinder,
        use_user_site: bool,
        download_dir: Optional[str] = None,
        verbosity: int = 0,
    ) -> RequirementPreparer:
        """
        Create a RequirementPreparer instance for the given parameters.
        """
        temp_build_dir_path = temp_build_dir.path
        assert temp_build_dir_path is not None
        legacy_resolver = False

        resolver_variant = cls.determine_resolver_variant(options)
        if resolver_variant == "resolvelib":
            # fast-deps (lazy wheels) is only honoured by the new resolver.
            lazy_wheel = "fast-deps" in options.features_enabled
            if lazy_wheel:
                logger.warning(
                    "pip is using lazily downloaded wheels using HTTP "
                    "range requests to obtain dependency information. "
                    "This experimental feature is enabled through "
                    "--use-feature=fast-deps and it is not ready for "
                    "production."
                )
        else:
            legacy_resolver = True
            lazy_wheel = False
            if "fast-deps" in options.features_enabled:
                logger.warning(
                    "fast-deps has no effect when used with the legacy resolver."
                )

        return RequirementPreparer(
            build_dir=temp_build_dir_path,
            src_dir=options.src_dir,
            download_dir=download_dir,
            build_isolation=options.build_isolation,
            check_build_deps=options.check_build_deps,
            build_tracker=build_tracker,
            session=session,
            progress_bar=options.progress_bar,
            finder=finder,
            require_hashes=options.require_hashes,
            use_user_site=use_user_site,
            lazy_wheel=lazy_wheel,
            verbosity=verbosity,
            legacy_resolver=legacy_resolver,
        )

    @classmethod
    def make_resolver(
        cls,
        preparer: RequirementPreparer,
        finder: PackageFinder,
        options: Values,
        wheel_cache: Optional[WheelCache] = None,
        use_user_site: bool = False,
        ignore_installed: bool = True,
        ignore_requires_python: bool = False,
        force_reinstall: bool = False,
        upgrade_strategy: str = "to-satisfy-only",
        use_pep517: Optional[bool] = None,
        py_version_info: Optional[Tuple[int, ...]] = None,
    ) -> BaseResolver:
        """
        Create a Resolver instance for the given parameters.
        """
        make_install_req = partial(
            install_req_from_req_string,
            isolated=options.isolated_mode,
            use_pep517=use_pep517,
        )
        resolver_variant = cls.determine_resolver_variant(options)
        # The long import name and duplicated invocation is needed to convince
        # Mypy into correctly typechecking. Otherwise it would complain the
        # "Resolver" class being redefined.
        if resolver_variant == "resolvelib":
            import pip._internal.resolution.resolvelib.resolver

            return pip._internal.resolution.resolvelib.resolver.Resolver(
                preparer=preparer,
                finder=finder,
                wheel_cache=wheel_cache,
                make_install_req=make_install_req,
                use_user_site=use_user_site,
                ignore_dependencies=options.ignore_dependencies,
                ignore_installed=ignore_installed,
                ignore_requires_python=ignore_requires_python,
                force_reinstall=force_reinstall,
                upgrade_strategy=upgrade_strategy,
                py_version_info=py_version_info,
            )
        import pip._internal.resolution.legacy.resolver

        return pip._internal.resolution.legacy.resolver.Resolver(
            preparer=preparer,
            finder=finder,
            wheel_cache=wheel_cache,
            make_install_req=make_install_req,
            use_user_site=use_user_site,
            ignore_dependencies=options.ignore_dependencies,
            ignore_installed=ignore_installed,
            ignore_requires_python=ignore_requires_python,
            force_reinstall=force_reinstall,
            upgrade_strategy=upgrade_strategy,
            py_version_info=py_version_info,
        )

    def get_requirements(
        self,
        args: List[str],
        options: Values,
        finder: PackageFinder,
        session: PipSession,
    ) -> List[InstallRequirement]:
        """
        Parse command-line arguments into the corresponding requirements.
        """
        requirements: List[InstallRequirement] = []
        # Constraint files (-c): version restrictions only, not
        # user-supplied requirements.
        for filename in options.constraints:
            for parsed_req in parse_requirements(
                filename,
                constraint=True,
                finder=finder,
                options=options,
                session=session,
            ):
                req_to_add = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode,
                    user_supplied=False,
                )
                requirements.append(req_to_add)

        # Requirements given directly on the command line.
        for req in args:
            req_to_add = install_req_from_line(
                req,
                comes_from=None,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                user_supplied=True,
                config_settings=getattr(options, "config_settings", None),
            )
            requirements.append(req_to_add)

        # Editable requirements (-e).
        for req in options.editables:
            req_to_add = install_req_from_editable(
                req,
                user_supplied=True,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                config_settings=getattr(options, "config_settings", None),
            )
            requirements.append(req_to_add)

        # NOTE: options.require_hashes may be set if --require-hashes is True
        for filename in options.requirements:
            for parsed_req in parse_requirements(
                filename, finder=finder, options=options, session=session
            ):
                req_to_add = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode,
                    use_pep517=options.use_pep517,
                    user_supplied=True,
                    config_settings=(
                        parsed_req.options.get("config_settings")
                        if parsed_req.options
                        else None
                    ),
                )
                requirements.append(req_to_add)

        # If any requirement has hash options, enable hash checking.
        if any(req.has_hash_options for req in requirements):
            options.require_hashes = True

        if not (args or options.editables or options.requirements):
            opts = {"name": self.name}
            if options.find_links:
                raise CommandError(
                    "You must give at least one requirement to {name} "
                    '(maybe you meant "pip {name} {links}"?)'.format(
                        **dict(opts, links=" ".join(options.find_links))
                    )
                )
            else:
                raise CommandError(
                    "You must give at least one requirement to {name} "
                    '(see "pip help {name}")'.format(**opts)
                )

        return requirements

    @staticmethod
    def trace_basic_info(finder: PackageFinder) -> None:
        """
        Trace basic information about the provided objects.
        """
        # Display where finder is looking for packages
        search_scope = finder.search_scope
        locations = search_scope.get_formatted_locations()
        if locations:
            logger.info(locations)

    def _build_package_finder(
        self,
        options: Values,
        session: PipSession,
        target_python: Optional[TargetPython] = None,
        ignore_requires_python: Optional[bool] = None,
    ) -> PackageFinder:
        """
        Create a package finder appropriate to this requirement command.

        :param ignore_requires_python: Whether to ignore incompatible
            "Requires-Python" values in links. Defaults to False.
        """
        link_collector = LinkCollector.create(session, options=options)
        selection_prefs = SelectionPreferences(
            allow_yanked=True,
            format_control=options.format_control,
            allow_all_prereleases=options.pre,
            prefer_binary=options.prefer_binary,
            ignore_requires_python=ignore_requires_python,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
            target_python=target_python,
        )
|
llava/lib/python3.10/site-packages/pip/_internal/cli/spinners.py
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import itertools
|
| 3 |
+
import logging
|
| 4 |
+
import sys
|
| 5 |
+
import time
|
| 6 |
+
from typing import IO, Generator, Optional
|
| 7 |
+
|
| 8 |
+
from pip._internal.utils.compat import WINDOWS
|
| 9 |
+
from pip._internal.utils.logging import get_indentation
|
| 10 |
+
|
| 11 |
+
logger = logging.getLogger(__name__)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class SpinnerInterface:
    """Abstract interface implemented by the spinner variants below."""

    def spin(self) -> None:
        """Advance the spinner by one tick."""
        raise NotImplementedError()

    def finish(self, final_status: str) -> None:
        """Stop spinning and report *final_status*."""
        raise NotImplementedError()
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class InteractiveSpinner(SpinnerInterface):
    """Spinner drawn in place on an interactive stream."""

    def __init__(
        self,
        message: str,
        file: Optional[IO[str]] = None,
        spin_chars: str = "-\\|/",
        # Empirically, 8 updates/second looks nice
        min_update_interval_seconds: float = 0.125,
    ):
        self._message = message
        self._file = sys.stdout if file is None else file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False
        self._spin_cycle = itertools.cycle(spin_chars)

        # The message prefix is printed once; only the status text after
        # it is rewritten on each tick.
        self._file.write(" " * get_indentation() + self._message + " ... ")
        self._width = 0

    def _write(self, status: str) -> None:
        assert not self._finished
        # Backspace over the previous status, blank it out with spaces,
        # then backspace again so the new status overwrites the same spot.
        erase = "\b" * self._width
        self._file.write(erase + " " * self._width + erase)
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self) -> None:
        if self._finished or not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status: str) -> None:
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
# Used for dumb terminals, non-interactive installs (no tty), etc.
|
| 72 |
+
# We still print updates occasionally (once every 60 seconds by default) to
|
| 73 |
+
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
|
| 74 |
+
# an indication that a task has frozen.
|
| 75 |
+
class NonInteractiveSpinner(SpinnerInterface):
    """Spinner that logs status lines instead of animating.

    Used for dumb terminals and no-tty runs; it still emits an occasional
    "still running" line (every 60 seconds by default) as a keep-alive for
    CI systems that treat silence as a hang.
    """

    def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status: str) -> None:
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self) -> None:
        if self._finished or not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status: str) -> None:
        if self._finished:
            return
        self._update(f"finished with status '{final_status}'")
        self._finished = True
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class RateLimiter:
    """Tracks whether a minimum interval has elapsed since the last reset."""

    def __init__(self, min_update_interval_seconds: float) -> None:
        self._min_update_interval_seconds = min_update_interval_seconds
        # Epoch 0 so the very first ready() call always succeeds.
        self._last_update: float = 0

    def ready(self) -> bool:
        """Return True once the configured interval has passed."""
        elapsed = time.time() - self._last_update
        return elapsed >= self._min_update_interval_seconds

    def reset(self) -> None:
        """Restart the interval from now."""
        self._last_update = time.time()
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
@contextlib.contextmanager
def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
    """Yield a spinner for *message*, finishing it appropriately on exit."""
    # The interactive spinner writes straight to sys.stdout, bypassing the
    # logging system, but behaves as if it logged at INFO level.  The
    # non-interactive one goes through logging and is therefore always in
    # sync with the logging configuration.
    active: SpinnerInterface
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        active = InteractiveSpinner(message)
    else:
        active = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield active
    except KeyboardInterrupt:
        active.finish("canceled")
        raise
    except Exception:
        active.finish("error")
        raise
    else:
        active.finish("done")
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
# ANSI escape sequences that hide/show the terminal cursor.
HIDE_CURSOR = "\x1b[?25l"
SHOW_CURSOR = "\x1b[?25h"
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
@contextlib.contextmanager
def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
    """Hide the terminal cursor for the duration of the context, when safe."""
    # The Windows terminal does not support the hide/show cursor ANSI codes,
    # even via colorama.  We also skip the escape codes when the output is
    # not a tty, or the user runs with --quiet, to avoid cluttering files;
    # see https://github.com/pypa/pip/issues/3418
    if WINDOWS or not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
        yield
    else:
        file.write(HIDE_CURSOR)
        try:
            yield
        finally:
            file.write(SHOW_CURSOR)
|
llava/lib/python3.10/site-packages/pip/_internal/cli/status_codes.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Process exit codes returned by pip's CLI commands.
SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
# "Nothing matched" — distinct non-error-ish failure, mirrors grep/rsync style.
NO_MATCHES_FOUND = 23
|
llava/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (4.78 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-310.pyc
ADDED
|
Binary file (2.23 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-310.pyc
ADDED
|
Binary file (11 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/__init__.py
ADDED
|
@@ -0,0 +1,116 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
pip._vendor is for vendoring dependencies of pip to prevent needing pip to
|
| 3 |
+
depend on something external.
|
| 4 |
+
|
| 5 |
+
Files inside of pip._vendor should be considered immutable and should only be
|
| 6 |
+
updated to versions from upstream.
|
| 7 |
+
"""
|
| 8 |
+
from __future__ import absolute_import
|
| 9 |
+
|
| 10 |
+
import glob
|
| 11 |
+
import os.path
|
| 12 |
+
import sys
|
| 13 |
+
|
| 14 |
+
# Downstream redistributors which have debundled our dependencies should also
|
| 15 |
+
# patch this value to be true. This will trigger the additional patching
|
| 16 |
+
# to cause things like "six" to be available as pip.
|
| 17 |
+
DEBUNDLED = False
|
| 18 |
+
|
| 19 |
+
# By default, look in this directory for a bunch of .whl files which we will
|
| 20 |
+
# add to the beginning of sys.path before attempting to import anything. This
|
| 21 |
+
# is done to support downstream re-distributors like Debian and Fedora who
|
| 22 |
+
# wish to create their own Wheels for our dependencies to aid in debundling.
|
| 23 |
+
WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
# Define a small helper function to alias our vendored modules to the real ones
|
| 27 |
+
# if the vendored ones do not exist. This idea of this was taken from
|
| 28 |
+
# https://github.com/kennethreitz/requests/pull/2567.
|
| 29 |
+
def vendored(modulename):
    """Alias the real top-level module *modulename* under this package.

    Used by debundled downstream distributions: if the vendored copy is
    absent, the system-installed module is registered in sys.modules under
    the ``pip._vendor.<name>`` key and attached as an attribute of the
    parent module, so vendored-style imports keep working.
    """
    vendored_name = "{0}.{1}".format(__name__, modulename)

    try:
        __import__(modulename, globals(), locals(), level=0)
    except ImportError:
        # Silently tolerate the failure: both the vendored and the real
        # module are missing, so whichever later import actually needs it
        # will raise a clearer, in-context ImportError.
        pass
    else:
        real_module = sys.modules[modulename]
        sys.modules[vendored_name] = real_module
        base, head = vendored_name.rsplit(".", 1)
        setattr(sys.modules[base], head, real_module)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
# If we're operating in a debundled setup, then we want to go ahead and trigger
|
| 51 |
+
# the aliasing of our vendored libraries as well as looking for wheels to add
|
| 52 |
+
# to our sys.path. This will cause all of this code to be a no-op typically
|
| 53 |
+
# however downstream redistributors can enable it in a consistent way across
|
| 54 |
+
# all platforms.
|
| 55 |
+
if DEBUNDLED:
    # Actually look inside of WHEEL_DIR to find .whl files and add them to the
    # front of our sys.path.
    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path

    # Actually alias all of our vendored dependencies.
    vendored("cachecontrol")
    vendored("certifi")
    vendored("distlib")
    vendored("distro")
    vendored("packaging")
    vendored("packaging.version")
    vendored("packaging.specifiers")
    vendored("pkg_resources")
    vendored("platformdirs")
    vendored("progress")
    vendored("pyproject_hooks")
    vendored("requests")
    vendored("requests.exceptions")
    vendored("requests.packages")
    vendored("requests.packages.urllib3")
    vendored("requests.packages.urllib3._collections")
    vendored("requests.packages.urllib3.connection")
    vendored("requests.packages.urllib3.connectionpool")
    vendored("requests.packages.urllib3.contrib")
    vendored("requests.packages.urllib3.contrib.ntlmpool")
    vendored("requests.packages.urllib3.contrib.pyopenssl")
    vendored("requests.packages.urllib3.exceptions")
    vendored("requests.packages.urllib3.fields")
    vendored("requests.packages.urllib3.filepost")
    vendored("requests.packages.urllib3.packages")
    vendored("requests.packages.urllib3.packages.ordered_dict")
    vendored("requests.packages.urllib3.packages.six")
    vendored("requests.packages.urllib3.packages.ssl_match_hostname")
    vendored("requests.packages.urllib3.packages.ssl_match_hostname."
             "_implementation")
    vendored("requests.packages.urllib3.poolmanager")
    vendored("requests.packages.urllib3.request")
    vendored("requests.packages.urllib3.response")
    vendored("requests.packages.urllib3.util")
    vendored("requests.packages.urllib3.util.connection")
    vendored("requests.packages.urllib3.util.request")
    vendored("requests.packages.urllib3.util.response")
    vendored("requests.packages.urllib3.util.retry")
    vendored("requests.packages.urllib3.util.ssl_")
    vendored("requests.packages.urllib3.util.timeout")
    vendored("requests.packages.urllib3.util.url")
    vendored("resolvelib")
    vendored("rich")
    vendored("rich.console")
    vendored("rich.highlighter")
    vendored("rich.logging")
    vendored("rich.markup")
    vendored("rich.progress")
    vendored("rich.segment")
    vendored("rich.style")
    vendored("rich.text")
    vendored("rich.traceback")
    # tomllib landed in the stdlib in 3.11; older interpreters need tomli.
    if sys.version_info < (3, 11):
        vendored("tomli")
    vendored("truststore")
    vendored("urllib3")
|
llava/lib/python3.10/site-packages/pip/_vendor/platformdirs/__main__.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Main entry point."""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
from pip._vendor.platformdirs import PlatformDirs, __version__
|
| 6 |
+
|
| 7 |
+
# Every PlatformDirs property printed by main(), in display order.
PROPS = (
    "user_data_dir",
    "user_config_dir",
    "user_cache_dir",
    "user_state_dir",
    "user_log_dir",
    "user_documents_dir",
    "user_downloads_dir",
    "user_pictures_dir",
    "user_videos_dir",
    "user_music_dir",
    "user_runtime_dir",
    "site_data_dir",
    "site_config_dir",
    "site_cache_dir",
    "site_runtime_dir",
)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def _print_props(dirs: PlatformDirs) -> None:
    """Print every property listed in PROPS for the given PlatformDirs."""
    for prop in PROPS:
        print(f"{prop}: {getattr(dirs, prop)}")  # noqa: T201


def main() -> None:
    """Run the main entry point.

    Demonstrates PlatformDirs output for four constructor variants:
    with a version, without one, without an appauthor, and with
    appauthor explicitly disabled.
    """
    app_name = "MyApp"
    app_author = "MyCompany"

    print(f"-- platformdirs {__version__} --")  # noqa: T201

    print("-- app dirs (with optional 'version')")  # noqa: T201
    _print_props(PlatformDirs(app_name, app_author, version="1.0"))

    print("\n-- app dirs (without optional 'version')")  # noqa: T201
    _print_props(PlatformDirs(app_name, app_author))

    print("\n-- app dirs (without optional 'appauthor')")  # noqa: T201
    _print_props(PlatformDirs(app_name))

    print("\n-- app dirs (with disabled 'appauthor')")  # noqa: T201
    _print_props(PlatformDirs(app_name, appauthor=False))


if __name__ == "__main__":
    main()
|
llava/lib/python3.10/site-packages/pip/_vendor/vendor.txt
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
CacheControl==0.14.1
|
| 2 |
+
distlib==0.3.9
|
| 3 |
+
distro==1.9.0
|
| 4 |
+
msgpack==1.1.0
|
| 5 |
+
packaging==24.2
|
| 6 |
+
platformdirs==4.3.6
|
| 7 |
+
pyproject-hooks==1.2.0
|
| 8 |
+
requests==2.32.3
|
| 9 |
+
certifi==2024.8.30
|
| 10 |
+
idna==3.10
|
| 11 |
+
urllib3==1.26.20
|
| 12 |
+
rich==13.9.4
|
| 13 |
+
pygments==2.18.0
|
| 14 |
+
typing_extensions==4.12.2
|
| 15 |
+
resolvelib==1.0.1
|
| 16 |
+
setuptools==70.3.0
|
| 17 |
+
tomli==2.2.1
|
| 18 |
+
truststore==0.10.0
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/__init__.py
ADDED
|
File without changes
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/aggregate.py
ADDED
|
@@ -0,0 +1,365 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import math
|
| 2 |
+
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Union
|
| 3 |
+
|
| 4 |
+
from ray.data._internal.null_aggregate import (
|
| 5 |
+
_null_wrap_accumulate_block,
|
| 6 |
+
_null_wrap_accumulate_row,
|
| 7 |
+
_null_wrap_finalize,
|
| 8 |
+
_null_wrap_init,
|
| 9 |
+
_null_wrap_merge,
|
| 10 |
+
)
|
| 11 |
+
from ray.data._internal.planner.exchange.sort_task_spec import SortKey
|
| 12 |
+
from ray.data.aggregate import AggregateFn
|
| 13 |
+
from ray.data.block import AggType, Block, BlockAccessor
|
| 14 |
+
|
| 15 |
+
if TYPE_CHECKING:
|
| 16 |
+
import pyarrow as pa
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class _AggregateOnKeyBase(AggregateFn):
    """Base for aggregations computed over a single target column."""

    def _set_key_fn(self, on: str):
        # Remember which column this aggregation targets; used by _validate.
        self._key_fn = on

    def _validate(self, schema: Optional[Union[type, "pa.lib.Schema"]]) -> None:
        # Delegate column/schema validation to the sort-key machinery.
        SortKey(self._key_fn).validate_schema(schema)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class Count(AggregateFn):
    """Defines count aggregation."""

    def __init__(self):
        def accumulate_block(acc, block):
            # Fold one block in by adding its row count to the accumulator.
            return acc + BlockAccessor.for_block(block).num_rows()

        super().__init__(
            init=lambda k: 0,
            accumulate_block=accumulate_block,
            merge=lambda a1, a2: a1 + a2,
            name="count()",
        )
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class Sum(_AggregateOnKeyBase):
    """Defines sum aggregation."""

    def __init__(
        self,
        on: Optional[str] = None,
        ignore_nulls: bool = True,
        alias_name: Optional[str] = None,
    ):
        self._set_key_fn(on)
        self._rs_name = alias_name if alias_name else f"sum({str(on)})"

        # Merge two partial sums, honoring null-handling semantics.
        merge_fn = _null_wrap_merge(ignore_nulls, lambda a1, a2: a1 + a2)

        super().__init__(
            init=_null_wrap_init(lambda k: 0),
            merge=merge_fn,
            accumulate_block=_null_wrap_accumulate_block(
                ignore_nulls,
                lambda block: BlockAccessor.for_block(block).sum(on, ignore_nulls),
                merge_fn,
            ),
            finalize=_null_wrap_finalize(lambda a: a),
            name=self._rs_name,
        )
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class Min(_AggregateOnKeyBase):
    """Defines min aggregation."""

    def __init__(
        self,
        on: Optional[str] = None,
        ignore_nulls: bool = True,
        alias_name: Optional[str] = None,
    ):
        self._set_key_fn(on)
        self._rs_name = alias_name if alias_name else f"min({str(on)})"

        # Merge two partial minima, honoring null-handling semantics.
        merge_fn = _null_wrap_merge(ignore_nulls, min)

        super().__init__(
            # +inf is the identity element for min.
            init=_null_wrap_init(lambda k: float("inf")),
            merge=merge_fn,
            accumulate_block=_null_wrap_accumulate_block(
                ignore_nulls,
                lambda block: BlockAccessor.for_block(block).min(on, ignore_nulls),
                merge_fn,
            ),
            finalize=_null_wrap_finalize(lambda a: a),
            name=self._rs_name,
        )
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class Max(_AggregateOnKeyBase):
    """Defines max aggregation."""

    def __init__(
        self,
        on: Optional[str] = None,
        ignore_nulls: bool = True,
        alias_name: Optional[str] = None,
    ):
        self._set_key_fn(on)
        self._rs_name = alias_name if alias_name else f"max({str(on)})"

        # Merge two partial maxima, honoring null-handling semantics.
        merge_fn = _null_wrap_merge(ignore_nulls, max)

        super().__init__(
            # -inf is the identity element for max.
            init=_null_wrap_init(lambda k: float("-inf")),
            merge=merge_fn,
            accumulate_block=_null_wrap_accumulate_block(
                ignore_nulls,
                lambda block: BlockAccessor.for_block(block).max(on, ignore_nulls),
                merge_fn,
            ),
            finalize=_null_wrap_finalize(lambda a: a),
            name=self._rs_name,
        )
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
class Mean(_AggregateOnKeyBase):
    """Defines mean aggregation."""

    def __init__(
        self,
        on: Optional[str] = None,
        ignore_nulls: bool = True,
        alias_name: Optional[str] = None,
    ):
        self._set_key_fn(on)
        self._rs_name = alias_name if alias_name else f"mean({str(on)})"

        # Partial state is [sum, count]; merging adds component-wise.
        merge_fn = _null_wrap_merge(
            ignore_nulls, lambda a1, a2: [a1[0] + a2[0], a1[1] + a2[1]]
        )

        def block_sum_count(block: Block) -> AggType:
            """Return [sum, count] for one block, or None if it contributes nothing."""
            acc = BlockAccessor.for_block(block)
            n = acc.count(on)
            if n == 0 or n is None:
                # Empty or all null.
                return None
            total = acc.sum(on, ignore_nulls)
            if total is None:
                # ignore_nulls=False and at least one null.
                return None
            return [total, n]

        super().__init__(
            init=_null_wrap_init(lambda k: [0, 0]),
            merge=merge_fn,
            accumulate_block=_null_wrap_accumulate_block(
                ignore_nulls,
                block_sum_count,
                merge_fn,
            ),
            # mean = sum / count.
            finalize=_null_wrap_finalize(lambda a: a[0] / a[1]),
            name=self._rs_name,
        )
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
class Std(_AggregateOnKeyBase):
    """Defines standard deviation aggregation.

    Uses Welford's online method for an accumulator-style computation of the
    standard deviation. This method was chosen due to its numerical
    stability, and it being computable in a single pass.
    This may give different (but more accurate) results than NumPy, Pandas,
    and sklearn, which use a less numerically stable two-pass algorithm.
    See
    https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Welford's_online_algorithm
    """

    def __init__(
        self,
        on: Optional[str] = None,
        ddof: int = 1,
        ignore_nulls: bool = True,
        alias_name: Optional[str] = None,
    ):
        self._set_key_fn(on)
        if alias_name:
            self._rs_name = alias_name
        else:
            self._rs_name = f"std({str(on)})"

        def merge(a: List[float], b: List[float]):
            # Merges two accumulations into one.
            # Each accumulator is [M2, mean, count] where M2 is the running
            # sum of squared differences from the mean.
            # See
            # https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm
            M2_a, mean_a, count_a = a
            M2_b, mean_b, count_b = b
            delta = mean_b - mean_a
            count = count_a + count_b
            # NOTE: We use this mean calculation since it's more numerically
            # stable than mean_a + delta * count_b / count, which actually
            # deviates from Pandas in the ~15th decimal place and causes our
            # exact comparison tests to fail.
            mean = (mean_a * count_a + mean_b * count_b) / count
            # Update the sum of squared differences.
            M2 = M2_a + M2_b + (delta**2) * count_a * count_b / count
            return [M2, mean, count]

        null_merge = _null_wrap_merge(ignore_nulls, merge)

        def vectorized_std(block: Block) -> AggType:
            # Compute one block's [M2, mean, count] in a vectorized pass,
            # or None if the block contributes nothing.
            block_acc = BlockAccessor.for_block(block)
            count = block_acc.count(on)
            if count == 0 or count is None:
                # Empty or all null.
                return None
            sum_ = block_acc.sum(on, ignore_nulls)
            if sum_ is None:
                # ignore_nulls=False and at least one null.
                return None
            mean = sum_ / count
            M2 = block_acc.sum_of_squared_diffs_from_mean(on, ignore_nulls, mean)
            return [M2, mean, count]

        def finalize(a: List[float]):
            # Compute the final standard deviation from the accumulated
            # sum of squared differences from current mean and the count.
            M2, mean, count = a
            if count < 2:
                return 0.0
            return math.sqrt(M2 / (count - ddof))

        super().__init__(
            init=_null_wrap_init(lambda k: [0, 0, 0]),
            merge=null_merge,
            accumulate_block=_null_wrap_accumulate_block(
                ignore_nulls,
                vectorized_std,
                null_merge,
            ),
            finalize=_null_wrap_finalize(finalize),
            name=(self._rs_name),
        )
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
class AbsMax(_AggregateOnKeyBase):
    """Defines absolute max aggregation."""

    def __init__(
        self,
        on: Optional[str] = None,
        ignore_nulls: bool = True,
        alias_name: Optional[str] = None,
    ):
        self._set_key_fn(on)
        # Row extractor: pulls the target value out of each row.
        extract = _to_on_fn(on)
        self._rs_name = alias_name if alias_name else f"abs_max({str(on)})"

        super().__init__(
            init=_null_wrap_init(lambda k: 0),
            merge=_null_wrap_merge(ignore_nulls, max),
            accumulate_row=_null_wrap_accumulate_row(
                ignore_nulls, extract, lambda acc, row: max(acc, abs(row))
            ),
            finalize=_null_wrap_finalize(lambda a: a),
            name=self._rs_name,
        )
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
def _to_on_fn(on: Optional[str]):
|
| 282 |
+
if on is None:
|
| 283 |
+
return lambda r: r
|
| 284 |
+
elif isinstance(on, str):
|
| 285 |
+
return lambda r: r[on]
|
| 286 |
+
else:
|
| 287 |
+
return on
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
class Quantile(_AggregateOnKeyBase):
    """Defines Quantile aggregation.

    Collects all values of the target column into a list and computes the
    q-quantile by linear interpolation at finalize time.

    Args:
        on: Target column name (or None for simple blocks).
        q: Quantile to compute, in [0, 1] (0.5 = median).
        ignore_nulls: Whether nulls are skipped or poison the result.
        alias_name: Optional output column name override.
    """

    def __init__(
        self,
        on: Optional[str] = None,
        q: float = 0.5,
        ignore_nulls: bool = True,
        alias_name: Optional[str] = None,
    ):
        self._set_key_fn(on)
        self._q = q
        if alias_name:
            self._rs_name = alias_name
        else:
            self._rs_name = f"quantile({str(on)})"

        def merge(a: List[int], b: List[int]):
            # Accumulators may be either a list of collected values or a bare
            # scalar; normalize every combination into a single list.
            # FIX: use the builtin ``list`` in isinstance checks — typing.List
            # is meant for annotations only (behavior is equivalent here, but
            # typing aliases in isinstance are deprecated).
            if isinstance(a, list) and isinstance(b, list):
                a.extend(b)
                return a
            if isinstance(a, list):
                # b is a scalar; append it unless it is null/empty.
                if b is not None and b != "":
                    a.append(b)
                return a
            if isinstance(b, list):
                # a is a scalar; append it unless it is null/empty.
                if a is not None and a != "":
                    b.append(a)
                return b

            # Both scalars: start a fresh list from the non-empty ones.
            ls = []
            if a is not None and a != "":
                ls.append(a)
            if b is not None and b != "":
                ls.append(b)
            return ls

        null_merge = _null_wrap_merge(ignore_nulls, merge)

        def block_row_ls(block: Block) -> AggType:
            # Collect this block's values of the target column into a list.
            block_acc = BlockAccessor.for_block(block)
            return [
                row.get(on)
                for row in block_acc.iter_rows(public_row_format=False)
            ]

        # FIX: removed the redundant ``import math`` that shadowed the
        # module-level import on every Quantile construction.

        def percentile(input_values, key: Optional[Callable[[Any], Any]] = None):
            """Linearly interpolated quantile of *input_values* at self._q."""
            if not input_values:
                return None

            if key is None:
                key = lambda x: x  # noqa: E731

            input_values = sorted(input_values)
            k = (len(input_values) - 1) * self._q
            f = math.floor(k)
            c = math.ceil(k)
            if f == c:
                # Exact index: no interpolation needed.
                return key(input_values[int(k)])
            # Weighted blend of the two bracketing values.
            d0 = key(input_values[int(f)]) * (c - k)
            d1 = key(input_values[int(c)]) * (k - f)
            return round(d0 + d1, 5)

        super().__init__(
            init=_null_wrap_init(lambda k: [0]),
            merge=null_merge,
            accumulate_block=_null_wrap_accumulate_block(
                ignore_nulls,
                block_row_ls,
                null_merge,
            ),
            finalize=_null_wrap_finalize(percentile),
            name=(self._rs_name),
        )
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/arrow_block.py
ADDED
|
@@ -0,0 +1,650 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import heapq
|
| 3 |
+
import logging
|
| 4 |
+
import random
|
| 5 |
+
from typing import (
|
| 6 |
+
TYPE_CHECKING,
|
| 7 |
+
Any,
|
| 8 |
+
Callable,
|
| 9 |
+
Dict,
|
| 10 |
+
Iterator,
|
| 11 |
+
List,
|
| 12 |
+
Optional,
|
| 13 |
+
Sequence,
|
| 14 |
+
Tuple,
|
| 15 |
+
TypeVar,
|
| 16 |
+
Union,
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
import numpy as np
|
| 20 |
+
|
| 21 |
+
from ray._private.utils import _get_pyarrow_version
|
| 22 |
+
from ray.air.constants import TENSOR_COLUMN_NAME
|
| 23 |
+
from ray.air.util.tensor_extensions.arrow import (
|
| 24 |
+
convert_to_pyarrow_array,
|
| 25 |
+
pyarrow_table_from_pydict,
|
| 26 |
+
)
|
| 27 |
+
from ray.data._internal.arrow_ops import transform_polars, transform_pyarrow
|
| 28 |
+
from ray.data._internal.numpy_support import convert_to_numpy
|
| 29 |
+
from ray.data._internal.row import TableRow
|
| 30 |
+
from ray.data._internal.table_block import TableBlockAccessor, TableBlockBuilder
|
| 31 |
+
from ray.data._internal.util import NULL_SENTINEL, find_partitions
|
| 32 |
+
from ray.data.block import (
|
| 33 |
+
Block,
|
| 34 |
+
BlockAccessor,
|
| 35 |
+
BlockExecStats,
|
| 36 |
+
BlockMetadata,
|
| 37 |
+
BlockType,
|
| 38 |
+
KeyType,
|
| 39 |
+
U,
|
| 40 |
+
)
|
| 41 |
+
from ray.data.context import DataContext
|
| 42 |
+
|
| 43 |
+
try:
|
| 44 |
+
import pyarrow
|
| 45 |
+
except ImportError:
|
| 46 |
+
pyarrow = None
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
if TYPE_CHECKING:
|
| 50 |
+
import pandas
|
| 51 |
+
|
| 52 |
+
from ray.data._internal.planner.exchange.sort_task_spec import SortKey
|
| 53 |
+
from ray.data.aggregate import AggregateFn
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
T = TypeVar("T")
|
| 57 |
+
logger = logging.getLogger(__name__)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
# We offload some transformations to polars for performance.
|
| 61 |
+
def get_sort_transform(context: DataContext) -> Callable:
    """Return the sort implementation selected by ``context.use_polars``."""
    return transform_polars.sort if context.use_polars else transform_pyarrow.sort
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def get_concat_and_sort_transform(context: DataContext) -> Callable:
    """Return the concat-then-sort implementation selected by the context."""
    if context.use_polars:
        return transform_polars.concat_and_sort
    return transform_pyarrow.concat_and_sort
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class ArrowRow(TableRow):
    """
    Row of a tabular Dataset backed by a Arrow Table block.

    ``self._row`` is a single-row pyarrow.Table view (set by TableRow).
    """

    def __getitem__(self, key: Union[str, List[str]]) -> Any:
        """Look up one column value (str key) or a tuple of values (list key)."""
        from ray.data.extensions import get_arrow_extension_tensor_types

        tensor_arrow_extension_types = get_arrow_extension_tensor_types()

        def get_item(keys: List[str]) -> Any:
            schema = self._row.schema
            if isinstance(schema.field(keys[0]).type, tensor_arrow_extension_types):
                # Build a tensor row.
                return tuple(
                    [
                        ArrowBlockAccessor._build_tensor_row(self._row, col_name=key)
                        for key in keys
                    ]
                )

            table = self._row.select(keys)
            if len(table) == 0:
                # Empty selection: no value for this key.
                return None

            # First (and only) element of each selected column.
            items = [col[0] for col in table.columns]
            try:
                # Try to interpret this as a pyarrow.Scalar value.
                return tuple([item.as_py() for item in items])

            except AttributeError:
                # Assume that this row is an element of an extension array, and
                # that it is bypassing pyarrow's scalar model for Arrow < 8.0.0.
                return items

        is_single_item = isinstance(key, str)
        keys = [key] if is_single_item else key

        items = get_item(keys)

        if items is None:
            return None
        elif is_single_item:
            # Unwrap the 1-tuple for a single string key.
            return items[0]
        else:
            return items

    def __iter__(self) -> Iterator:
        # Iterate over column names, mirroring dict iteration semantics.
        for k in self._row.column_names:
            yield k

    def __len__(self) -> int:
        # Length is the number of columns (fields) in the row.
        return self._row.num_columns
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
class ArrowBlockBuilder(TableBlockBuilder):
    """Builds Arrow-table-backed blocks from added rows, columns, or tables."""

    def __init__(self):
        # Arrow is an optional dependency; fail loudly if it's absent.
        if pyarrow is None:
            raise ImportError("Run `pip install pyarrow` for Arrow support")
        super().__init__((pyarrow.Table, bytes))

    @staticmethod
    def _table_from_pydict(columns: Dict[str, List[Any]]) -> Block:
        """Build an Arrow table from a dict of column-name -> values."""
        # Convert each column through numpy before handing it to Arrow.
        pa_cols: Dict[str, pyarrow.Array] = {
            name: convert_to_pyarrow_array(convert_to_numpy(values), name)
            for name, values in columns.items()
        }
        return pyarrow_table_from_pydict(pa_cols)

    @staticmethod
    def _concat_tables(tables: List[Block]) -> Block:
        """Concatenate tables via the (zero-copy-capable) pyarrow transform."""
        return transform_pyarrow.concat(tables)

    @staticmethod
    def _concat_would_copy() -> bool:
        # Arrow concatenation is zero-copy.
        return False

    @staticmethod
    def _empty_table() -> "pyarrow.Table":
        """Return an empty (zero-column) Arrow table."""
        return pyarrow_table_from_pydict({})

    def block_type(self) -> BlockType:
        """Return the block type tag for Arrow-backed blocks."""
        return BlockType.ARROW
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
class ArrowBlockAccessor(TableBlockAccessor):
    """Block accessor for blocks backed by a ``pyarrow.Table``."""

    # Row views of this block type are materialized as ArrowRow.
    ROW_TYPE = ArrowRow

    def __init__(self, table: "pyarrow.Table"):
        # pyarrow is an optional dependency; fail fast if it's missing.
        if pyarrow is None:
            raise ImportError("Run `pip install pyarrow` for Arrow support")
        super().__init__(table)
|
| 170 |
+
|
| 171 |
+
def column_names(self) -> List[str]:
|
| 172 |
+
return self._table.column_names
|
| 173 |
+
|
| 174 |
+
def append_column(self, name: str, data: Any) -> Block:
|
| 175 |
+
assert name not in self._table.column_names
|
| 176 |
+
|
| 177 |
+
if any(isinstance(item, np.ndarray) for item in data):
|
| 178 |
+
raise NotImplementedError(
|
| 179 |
+
f"`{self.__class__.__name__}.append_column()` doesn't support "
|
| 180 |
+
"array-like data."
|
| 181 |
+
)
|
| 182 |
+
|
| 183 |
+
return self._table.append_column(name, [data])
|
| 184 |
+
|
| 185 |
+
    @classmethod
    def from_bytes(cls, data: bytes) -> "ArrowBlockAccessor":
        """Deserialize an Arrow IPC stream back into a block accessor."""
        reader = pyarrow.ipc.open_stream(data)
        return cls(reader.read_all())

    @staticmethod
    def _build_tensor_row(
        row: ArrowRow, col_name: str = TENSOR_COLUMN_NAME
    ) -> np.ndarray:
        """Extract the tensor element of ``row``'s tensor column as an ndarray."""
        from packaging.version import parse as parse_version

        element = row[col_name][0]
        # TODO(Clark): Reduce this to np.asarray(element) once we only support Arrow
        # 9.0.0+.
        pyarrow_version = _get_pyarrow_version()
        if pyarrow_version is not None:
            pyarrow_version = parse_version(pyarrow_version)
        # NOTE: a None version means the installed version couldn't be determined;
        # in that case we assume a recent (>= 9.0.0) Arrow.
        if pyarrow_version is None or pyarrow_version >= parse_version("8.0.0"):
            assert isinstance(element, pyarrow.ExtensionScalar)
            if pyarrow_version is None or pyarrow_version >= parse_version("9.0.0"):
                # For Arrow 9.0.0+, accessing an element in a chunked tensor array
                # produces an ArrowTensorScalar, which we convert to an ndarray using
                # .as_py().
                element = element.as_py()
            else:
                # For Arrow 8.*, accessing an element in a chunked tensor array produces
                # an ExtensionScalar, which we convert to an ndarray using our custom
                # method.
                element = element.type._extension_scalar_to_ndarray(element)
        # For Arrow < 8.0.0, accessing an element in a chunked tensor array produces an
        # ndarray, which we return directly.
        assert isinstance(element, np.ndarray), type(element)
        return element
|
| 218 |
+
|
| 219 |
+
def slice(self, start: int, end: int, copy: bool = False) -> "pyarrow.Table":
|
| 220 |
+
view = self._table.slice(start, end - start)
|
| 221 |
+
if copy:
|
| 222 |
+
view = transform_pyarrow.combine_chunks(view)
|
| 223 |
+
return view
|
| 224 |
+
|
| 225 |
+
def random_shuffle(self, random_seed: Optional[int]) -> "pyarrow.Table":
|
| 226 |
+
# TODO(swang): Creating this np.array index can add a lot of memory
|
| 227 |
+
# pressure when there are a large number of small rows. Investigate
|
| 228 |
+
# random shuffling in place to reduce memory pressure.
|
| 229 |
+
# See https://github.com/ray-project/ray/issues/42146.
|
| 230 |
+
random = np.random.RandomState(random_seed)
|
| 231 |
+
return self.take(random.permutation(self.num_rows()))
|
| 232 |
+
|
| 233 |
+
def schema(self) -> "pyarrow.lib.Schema":
|
| 234 |
+
return self._table.schema
|
| 235 |
+
|
| 236 |
+
def to_pandas(self) -> "pandas.DataFrame":
|
| 237 |
+
from ray.air.util.data_batch_conversion import _cast_tensor_columns_to_ndarrays
|
| 238 |
+
|
| 239 |
+
df = self._table.to_pandas()
|
| 240 |
+
ctx = DataContext.get_current()
|
| 241 |
+
if ctx.enable_tensor_extension_casting:
|
| 242 |
+
df = _cast_tensor_columns_to_ndarrays(df)
|
| 243 |
+
return df
|
| 244 |
+
|
| 245 |
+
def to_numpy(
|
| 246 |
+
self, columns: Optional[Union[str, List[str]]] = None
|
| 247 |
+
) -> Union[np.ndarray, Dict[str, np.ndarray]]:
|
| 248 |
+
if columns is None:
|
| 249 |
+
columns = self._table.column_names
|
| 250 |
+
should_be_single_ndarray = False
|
| 251 |
+
elif isinstance(columns, list):
|
| 252 |
+
should_be_single_ndarray = False
|
| 253 |
+
else:
|
| 254 |
+
columns = [columns]
|
| 255 |
+
should_be_single_ndarray = True
|
| 256 |
+
|
| 257 |
+
column_names_set = set(self._table.column_names)
|
| 258 |
+
for column in columns:
|
| 259 |
+
if column not in column_names_set:
|
| 260 |
+
raise ValueError(
|
| 261 |
+
f"Cannot find column {column}, available columns: "
|
| 262 |
+
f"{column_names_set}"
|
| 263 |
+
)
|
| 264 |
+
|
| 265 |
+
column_values_ndarrays = []
|
| 266 |
+
|
| 267 |
+
for col_name in columns:
|
| 268 |
+
col = self._table[col_name]
|
| 269 |
+
|
| 270 |
+
# Combine columnar values arrays to make these contiguous
|
| 271 |
+
# (making them compatible with numpy format)
|
| 272 |
+
combined_array = transform_pyarrow.combine_chunked_array(col)
|
| 273 |
+
|
| 274 |
+
column_values_ndarrays.append(
|
| 275 |
+
transform_pyarrow.to_numpy(combined_array, zero_copy_only=False)
|
| 276 |
+
)
|
| 277 |
+
|
| 278 |
+
if should_be_single_ndarray:
|
| 279 |
+
assert len(columns) == 1
|
| 280 |
+
return column_values_ndarrays[0]
|
| 281 |
+
else:
|
| 282 |
+
return dict(zip(columns, column_values_ndarrays))
|
| 283 |
+
|
| 284 |
+
def to_arrow(self) -> "pyarrow.Table":
|
| 285 |
+
return self._table
|
| 286 |
+
|
| 287 |
+
def num_rows(self) -> int:
|
| 288 |
+
# Arrow may represent an empty table via an N > 0 row, 0-column table, e.g. when
|
| 289 |
+
# slicing an empty table, so we return 0 if num_columns == 0.
|
| 290 |
+
return self._table.num_rows if self._table.num_columns > 0 else 0
|
| 291 |
+
|
| 292 |
+
def size_bytes(self) -> int:
|
| 293 |
+
return self._table.nbytes
|
| 294 |
+
|
| 295 |
+
def _zip(self, acc: BlockAccessor) -> "Block":
|
| 296 |
+
r = self.to_arrow()
|
| 297 |
+
s = acc.to_arrow()
|
| 298 |
+
for col_name in s.column_names:
|
| 299 |
+
col = s.column(col_name)
|
| 300 |
+
# Ensure the column names are unique after zip.
|
| 301 |
+
if col_name in r.column_names:
|
| 302 |
+
i = 1
|
| 303 |
+
new_name = col_name
|
| 304 |
+
while new_name in r.column_names:
|
| 305 |
+
new_name = "{}_{}".format(col_name, i)
|
| 306 |
+
i += 1
|
| 307 |
+
col_name = new_name
|
| 308 |
+
r = r.append_column(col_name, col)
|
| 309 |
+
return r
|
| 310 |
+
|
| 311 |
+
@staticmethod
|
| 312 |
+
def builder() -> ArrowBlockBuilder:
|
| 313 |
+
return ArrowBlockBuilder()
|
| 314 |
+
|
| 315 |
+
@staticmethod
|
| 316 |
+
def _empty_table() -> "pyarrow.Table":
|
| 317 |
+
return ArrowBlockBuilder._empty_table()
|
| 318 |
+
|
| 319 |
+
def take(
|
| 320 |
+
self,
|
| 321 |
+
indices: Union[List[int], "pyarrow.Array", "pyarrow.ChunkedArray"],
|
| 322 |
+
) -> "pyarrow.Table":
|
| 323 |
+
"""Select rows from the underlying table.
|
| 324 |
+
|
| 325 |
+
This method is an alternative to pyarrow.Table.take(), which breaks for
|
| 326 |
+
extension arrays.
|
| 327 |
+
"""
|
| 328 |
+
return transform_pyarrow.take_table(self._table, indices)
|
| 329 |
+
|
| 330 |
+
def select(self, columns: List[str]) -> "pyarrow.Table":
|
| 331 |
+
if not all(isinstance(col, str) for col in columns):
|
| 332 |
+
raise ValueError(
|
| 333 |
+
"Columns must be a list of column name strings when aggregating on "
|
| 334 |
+
f"Arrow blocks, but got: {columns}."
|
| 335 |
+
)
|
| 336 |
+
return self._table.select(columns)
|
| 337 |
+
|
| 338 |
+
def _sample(self, n_samples: int, sort_key: "SortKey") -> "pyarrow.Table":
|
| 339 |
+
indices = random.sample(range(self._table.num_rows), n_samples)
|
| 340 |
+
table = self._table.select(sort_key.get_columns())
|
| 341 |
+
return transform_pyarrow.take_table(table, indices)
|
| 342 |
+
|
| 343 |
+
def count(self, on: str) -> Optional[U]:
|
| 344 |
+
"""Count the number of non-null values in the provided column."""
|
| 345 |
+
import pyarrow.compute as pac
|
| 346 |
+
|
| 347 |
+
if not isinstance(on, str):
|
| 348 |
+
raise ValueError(
|
| 349 |
+
"on must be a string when aggregating on Arrow blocks, but got:"
|
| 350 |
+
f"{type(on)}."
|
| 351 |
+
)
|
| 352 |
+
|
| 353 |
+
if self.num_rows() == 0:
|
| 354 |
+
return None
|
| 355 |
+
|
| 356 |
+
col = self._table[on]
|
| 357 |
+
return pac.count(col).as_py()
|
| 358 |
+
|
| 359 |
+
def _apply_arrow_compute(
|
| 360 |
+
self, compute_fn: Callable, on: str, ignore_nulls: bool
|
| 361 |
+
) -> Optional[U]:
|
| 362 |
+
"""Helper providing null handling around applying an aggregation to a column."""
|
| 363 |
+
import pyarrow as pa
|
| 364 |
+
|
| 365 |
+
if not isinstance(on, str):
|
| 366 |
+
raise ValueError(
|
| 367 |
+
"on must be a string when aggregating on Arrow blocks, but got:"
|
| 368 |
+
f"{type(on)}."
|
| 369 |
+
)
|
| 370 |
+
|
| 371 |
+
if self.num_rows() == 0:
|
| 372 |
+
return None
|
| 373 |
+
|
| 374 |
+
col = self._table[on]
|
| 375 |
+
if pa.types.is_null(col.type):
|
| 376 |
+
return None
|
| 377 |
+
else:
|
| 378 |
+
return compute_fn(col, skip_nulls=ignore_nulls).as_py()
|
| 379 |
+
|
| 380 |
+
    def sum(self, on: str, ignore_nulls: bool) -> Optional[U]:
        """Sum of column ``on``; None for an empty or all-null column."""
        import pyarrow.compute as pac

        return self._apply_arrow_compute(pac.sum, on, ignore_nulls)

    def min(self, on: str, ignore_nulls: bool) -> Optional[U]:
        """Minimum of column ``on``; None for an empty or all-null column."""
        import pyarrow.compute as pac

        return self._apply_arrow_compute(pac.min, on, ignore_nulls)

    def max(self, on: str, ignore_nulls: bool) -> Optional[U]:
        """Maximum of column ``on``; None for an empty or all-null column."""
        import pyarrow.compute as pac

        return self._apply_arrow_compute(pac.max, on, ignore_nulls)

    def mean(self, on: str, ignore_nulls: bool) -> Optional[U]:
        """Mean of column ``on``; None for an empty or all-null column."""
        import pyarrow.compute as pac

        return self._apply_arrow_compute(pac.mean, on, ignore_nulls)
|
| 399 |
+
|
| 400 |
+
    def sum_of_squared_diffs_from_mean(
        self,
        on: str,
        ignore_nulls: bool,
        mean: Optional[U] = None,
    ) -> Optional[U]:
        """Return sum((x - mean)^2) over column ``on`` (a variance numerator).

        Args:
            on: Column to aggregate over.
            ignore_nulls: Whether nulls are skipped by the aggregation.
            mean: Optional precomputed mean; computed from the column if None.
        """
        import pyarrow.compute as pac

        if mean is None:
            # If precomputed mean not given, we compute it ourselves.
            mean = self.mean(on, ignore_nulls)
            if mean is None:
                # Empty/all-null column: no defined result.
                return None
        return self._apply_arrow_compute(
            # The lambda closes over `mean` computed (or supplied) above.
            lambda col, skip_nulls: pac.sum(
                pac.power(pac.subtract(col, mean), 2),
                skip_nulls=skip_nulls,
            ),
            on,
            ignore_nulls,
        )
|
| 421 |
+
|
| 422 |
+
def sort_and_partition(
|
| 423 |
+
self, boundaries: List[T], sort_key: "SortKey"
|
| 424 |
+
) -> List["Block"]:
|
| 425 |
+
if self._table.num_rows == 0:
|
| 426 |
+
# If the pyarrow table is empty we may not have schema
|
| 427 |
+
# so calling sort_indices() will raise an error.
|
| 428 |
+
return [self._empty_table() for _ in range(len(boundaries) + 1)]
|
| 429 |
+
|
| 430 |
+
context = DataContext.get_current()
|
| 431 |
+
sort = get_sort_transform(context)
|
| 432 |
+
|
| 433 |
+
table = sort(self._table, sort_key)
|
| 434 |
+
if len(boundaries) == 0:
|
| 435 |
+
return [table]
|
| 436 |
+
return find_partitions(table, boundaries, sort_key)
|
| 437 |
+
|
| 438 |
+
    def combine(self, sort_key: "SortKey", aggs: Tuple["AggregateFn"]) -> Block:
        """Combine rows with the same key into an accumulator.

        This assumes the block is already sorted by key in ascending order.

        Args:
            sort_key: A column name or list of column names.
            If this is ``None``, place all rows in a single group.

            aggs: The aggregations to do.

        Returns:
            A sorted block of [k, v_1, ..., v_n] columns where k is the groupby
            key and v_i is the partially combined accumulator for the ith given
            aggregation.
            If key is None then the k column is omitted.
        """
        keys: List[str] = sort_key.get_columns()

        def iter_groups() -> Iterator[Tuple[Sequence[KeyType], Block]]:
            """Creates an iterator over zero-copy group views."""
            if not keys:
                # Global aggregation consists of a single "group", so we short-circuit.
                yield tuple(), self.to_block()
                return

            start = end = 0
            # NOTE: `iter` shadows the builtin here; it is a row iterator.
            iter = self.iter_rows(public_row_format=False)
            next_row = None
            while True:
                try:
                    if next_row is None:
                        next_row = next(iter)
                    next_keys = next_row[keys]
                    # Advance `end` past every consecutive row with the same key.
                    while next_row[keys] == next_keys:
                        end += 1
                        try:
                            next_row = next(iter)
                        except StopIteration:
                            next_row = None
                            break
                    # Zero-copy view over rows [start, end) of this group.
                    yield next_keys, self.slice(start, end)
                    start = end
                except StopIteration:
                    break

        builder = ArrowBlockBuilder()
        for group_keys, group_view in iter_groups():
            # Aggregate.
            # Single-key groups pass the bare key, not a 1-tuple, to init().
            init_vals = group_keys
            if len(group_keys) == 1:
                init_vals = group_keys[0]

            accumulators = [agg.init(init_vals) for agg in aggs]
            for i in range(len(aggs)):
                accumulators[i] = aggs[i].accumulate_block(accumulators[i], group_view)

            # Build the row.
            row = {}
            if keys:
                for k, gk in zip(keys, group_keys):
                    row[k] = gk

            count = collections.defaultdict(int)
            for agg, accumulator in zip(aggs, accumulators):
                name = agg.name
                # Check for conflicts with existing aggregation name.
                if count[name] > 0:
                    name = self._munge_conflict(name, count[name])
                count[name] += 1
                row[name] = accumulator

            builder.add(row)

        return builder.build()
|
| 513 |
+
|
| 514 |
+
@staticmethod
|
| 515 |
+
def _munge_conflict(name, count):
|
| 516 |
+
return f"{name}_{count+1}"
|
| 517 |
+
|
| 518 |
+
@staticmethod
|
| 519 |
+
def merge_sorted_blocks(
|
| 520 |
+
blocks: List[Block], sort_key: "SortKey"
|
| 521 |
+
) -> Tuple[Block, BlockMetadata]:
|
| 522 |
+
stats = BlockExecStats.builder()
|
| 523 |
+
blocks = [b for b in blocks if b.num_rows > 0]
|
| 524 |
+
if len(blocks) == 0:
|
| 525 |
+
ret = ArrowBlockAccessor._empty_table()
|
| 526 |
+
else:
|
| 527 |
+
# Handle blocks of different types.
|
| 528 |
+
blocks = TableBlockAccessor.normalize_block_types(blocks, "arrow")
|
| 529 |
+
concat_and_sort = get_concat_and_sort_transform(DataContext.get_current())
|
| 530 |
+
ret = concat_and_sort(blocks, sort_key)
|
| 531 |
+
return ret, ArrowBlockAccessor(ret).get_metadata(exec_stats=stats.build())
|
| 532 |
+
|
| 533 |
+
    @staticmethod
    def aggregate_combined_blocks(
        blocks: List[Block],
        sort_key: "SortKey",
        aggs: Tuple["AggregateFn"],
        finalize: bool,
    ) -> Tuple[Block, BlockMetadata]:
        """Aggregate sorted, partially combined blocks with the same key range.

        This assumes blocks are already sorted by key in ascending order,
        so we can do merge sort to get all the rows with the same key.

        Args:
            blocks: A list of partially combined and sorted blocks.
            sort_key: The column name of key or None for global aggregation.
            aggs: The aggregations to do.
            finalize: Whether to finalize the aggregation. This is used as an
                optimization for cases where we repeatedly combine partially
                aggregated groups.

        Returns:
            A block of [k, v_1, ..., v_n] columns and its metadata where k is
            the groupby key and v_i is the corresponding aggregation result for
            the ith given aggregation.
            If key is None then the k column is omitted.
        """

        stats = BlockExecStats.builder()
        keys = sort_key.get_columns()

        def key_fn(r):
            # Global aggregation (no keys) maps every row to the same key.
            if keys:
                return tuple(r[keys])
            else:
                return (0,)

        # Replace Nones with NULL_SENTINEL to ensure safe sorting.
        def key_fn_with_null_sentinel(r):
            values = key_fn(r)
            return [NULL_SENTINEL if v is None else v for v in values]

        # Handle blocks of different types.
        blocks = TableBlockAccessor.normalize_block_types(blocks, "arrow")

        # Merge-sort all per-block row iterators by key.
        # NOTE: `iter` shadows the builtin here; it is the merged row iterator.
        iter = heapq.merge(
            *[
                ArrowBlockAccessor(block).iter_rows(public_row_format=False)
                for block in blocks
            ],
            key=key_fn_with_null_sentinel,
        )
        next_row = None
        builder = ArrowBlockBuilder()
        while True:
            try:
                if next_row is None:
                    next_row = next(iter)
                next_keys = key_fn(next_row)
                next_key_columns = keys

                def gen():
                    # Yields the run of rows sharing `next_keys`, advancing the
                    # shared merged iterator via `nonlocal`.
                    nonlocal iter
                    nonlocal next_row
                    while key_fn(next_row) == next_keys:
                        yield next_row
                        try:
                            next_row = next(iter)
                        except StopIteration:
                            next_row = None
                            break

                # Merge.
                first = True
                accumulators = [None] * len(aggs)
                resolved_agg_names = [None] * len(aggs)
                for r in gen():
                    if first:
                        # First row of the group seeds the accumulators and
                        # resolves (deduplicated) aggregation column names.
                        count = collections.defaultdict(int)
                        for i in range(len(aggs)):
                            name = aggs[i].name
                            # Check for conflicts with existing aggregation
                            # name.
                            if count[name] > 0:
                                name = ArrowBlockAccessor._munge_conflict(
                                    name, count[name]
                                )
                            count[name] += 1
                            resolved_agg_names[i] = name
                            accumulators[i] = r[name]
                        first = False
                    else:
                        for i in range(len(aggs)):
                            accumulators[i] = aggs[i].merge(
                                accumulators[i], r[resolved_agg_names[i]]
                            )
                # Build the row.
                row = {}
                if keys:
                    for col_name, next_key in zip(next_key_columns, next_keys):
                        row[col_name] = next_key

                for agg, agg_name, accumulator in zip(
                    aggs, resolved_agg_names, accumulators
                ):
                    if finalize:
                        row[agg_name] = agg.finalize(accumulator)
                    else:
                        row[agg_name] = accumulator

                builder.add(row)
            except StopIteration:
                break

        ret = builder.build()
        return ret, ArrowBlockAccessor(ret).get_metadata(exec_stats=stats.build())
|
| 648 |
+
|
| 649 |
+
    def block_type(self) -> BlockType:
        """Return the block type tag for Arrow-backed blocks."""
        return BlockType.ARROW
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/batcher.py
ADDED
|
@@ -0,0 +1,325 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Optional
|
| 2 |
+
|
| 3 |
+
from ray.data._internal.arrow_block import ArrowBlockAccessor
|
| 4 |
+
from ray.data._internal.arrow_ops import transform_pyarrow
|
| 5 |
+
from ray.data._internal.delegating_block_builder import DelegatingBlockBuilder
|
| 6 |
+
from ray.data.block import Block, BlockAccessor
|
| 7 |
+
|
| 8 |
+
# pyarrow.Table.slice is slow when the table has many chunks
# so we combine chunks into a single one to make slice faster
# with the cost of an extra copy.
# See https://github.com/ray-project/ray/issues/31108 for more details.
# TODO(jjyao): remove this once
# https://github.com/apache/arrow/issues/35126 is resolved.
MIN_NUM_CHUNKS_TO_TRIGGER_COMBINE_CHUNKS: int = 10

# Delay compaction until the shuffle buffer has reached this ratio over the min
# shuffle buffer size. Setting this to 1 minimizes memory usage, at the cost of
# frequent compactions. Setting this to higher values increases memory usage but
# reduces compaction frequency.
SHUFFLE_BUFFER_COMPACTION_RATIO: float = 1.5
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class BatcherInterface:
    """Interface for batchers that buffer blocks and emit batches of rows."""

    def add(self, block: Block):
        """Add a block to the block buffer.

        Args:
            block: Block to add to the block buffer.
        """
        raise NotImplementedError()

    # NOTE: previously annotated `-> bool`, but this is a pure notification:
    # implementations only set internal state and return None.
    def done_adding(self) -> None:
        """Indicate to the batcher that no more blocks will be added to the buffer."""
        raise NotImplementedError()

    def has_batch(self) -> bool:
        """Whether this Batcher has any full batches."""
        raise NotImplementedError()

    def has_any(self) -> bool:
        """Whether this Batcher has any data."""
        raise NotImplementedError()

    def next_batch(self) -> Block:
        """Get the next batch from the block buffer.

        Returns:
            A batch represented as a Block.
        """
        raise NotImplementedError()
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class Batcher(BatcherInterface):
    """Chunks blocks into batches."""

    # Implementation Note: When there are multiple batches per block, this batcher will
    # slice off and return each batch and add the remaining block back to the buffer
    # instead of optimally slicing and returning all batches from the block at once.
    # This will result in extra (and nested) block slicing. However, since slices are
    # zero-copy views, we sacrifice what should be a small performance hit for better
    # readability.

    def __init__(self, batch_size: Optional[int], ensure_copy: bool = False):
        """
        Construct a batcher that yields batches of batch_sizes rows.

        Args:
            batch_size: The size of batches to yield.
            ensure_copy: Whether batches are always copied from the underlying base
                blocks (not zero-copy views).
        """
        self._batch_size = batch_size
        self._buffer = []
        self._buffer_size = 0
        self._done_adding = False
        self._ensure_copy = ensure_copy

    def add(self, block: Block):
        """Add a block to the block buffer.

        Note empty block is not added to buffer.

        Args:
            block: Block to add to the block buffer.
        """
        # Compute the row count once instead of building two accessors
        # (the original constructed BlockAccessor.for_block(block) twice).
        num_rows = BlockAccessor.for_block(block).num_rows()
        if num_rows > 0:
            self._buffer.append(block)
            self._buffer_size += num_rows

    # NOTE: fixed return annotation (was `-> bool`): this only sets a flag.
    def done_adding(self) -> None:
        """Indicate to the batcher that no more blocks will be added to the batcher."""
        self._done_adding = True

    def has_batch(self) -> bool:
        """Whether this Batcher has any full batches."""
        return self.has_any() and (
            self._batch_size is None or self._buffer_size >= self._batch_size
        )

    def has_any(self) -> bool:
        """Whether this Batcher has any data."""
        return self._buffer_size > 0

    def next_batch(self) -> Block:
        """Get the next batch from the block buffer.

        Returns:
            A batch represented as a Block.
        """
        assert self.has_batch() or (self._done_adding and self.has_any())
        needs_copy = self._ensure_copy
        # If no batch size, short-circuit.
        if self._batch_size is None:
            assert len(self._buffer) == 1
            block = self._buffer[0]
            if needs_copy:
                # Copy block if needing to ensure fresh batch copy.
                block = BlockAccessor.for_block(block)
                block = block.slice(0, block.num_rows(), copy=True)
            self._buffer = []
            self._buffer_size = 0
            return block
        output = DelegatingBlockBuilder()
        leftover = []
        needed = self._batch_size
        for block in self._buffer:
            accessor = BlockAccessor.for_block(block)
            if needed <= 0:
                # We already have a full batch, so add this block to
                # the leftovers.
                leftover.append(block)
            elif accessor.num_rows() <= needed:
                # We need this entire block to fill out a batch.
                output.add_block(accessor.to_block())
                needed -= accessor.num_rows()
            else:
                if (
                    isinstance(accessor, ArrowBlockAccessor)
                    and block.num_columns > 0
                    and block.column(0).num_chunks
                    >= MIN_NUM_CHUNKS_TO_TRIGGER_COMBINE_CHUNKS
                ):
                    # Slicing a heavily-chunked Arrow table is slow; compact
                    # the chunks first (see module-level note).
                    accessor = BlockAccessor.for_block(
                        transform_pyarrow.combine_chunks(block)
                    )
                # We only need part of the block to fill out a batch.
                output.add_block(accessor.slice(0, needed, copy=False))
                # Add the rest of the block to the leftovers.
                leftover.append(accessor.slice(needed, accessor.num_rows(), copy=False))
                needed = 0

        # Move the leftovers into the block buffer so they're the first
        # blocks consumed on the next batch extraction.
        self._buffer = leftover
        # BUGFIX: clamp at zero -- the final (partial) batch yielded after
        # done_adding() can have fewer than `batch_size` rows, which previously
        # drove `_buffer_size` negative.
        self._buffer_size = max(0, self._buffer_size - self._batch_size)
        needs_copy = needs_copy and not output.will_build_yield_copy()
        batch = output.build()
        if needs_copy:
            # Need to ensure that the batch is a fresh copy.
            batch = BlockAccessor.for_block(batch)
            batch = batch.slice(0, batch.num_rows(), copy=True)
        return batch
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
class ShufflingBatcher(BatcherInterface):
|
| 165 |
+
"""Chunks blocks into shuffled batches, using a local in-memory shuffle buffer."""
|
| 166 |
+
|
| 167 |
+
# Implementation Note:
|
| 168 |
+
#
|
| 169 |
+
# This shuffling batcher lazily builds a shuffle buffer from added blocks, and once
|
| 170 |
+
# a batch is requested via .next_batch(), it concatenates the blocks into a concrete
|
| 171 |
+
# shuffle buffer and randomly shuffles the entire buffer.
|
| 172 |
+
#
|
| 173 |
+
# Adding of more blocks can be intermixed with retrieving batches, but it should be
|
| 174 |
+
# noted that we can end up performing two expensive operations on each retrieval:
|
| 175 |
+
# 1. Build added blocks into a concrete shuffle buffer.
|
| 176 |
+
# 2. Shuffling the entire buffer.
|
| 177 |
+
# To amortize the overhead of this process, we only shuffle the blocks after a
|
| 178 |
+
# delay designated by SHUFFLE_BUFFER_COMPACTION_RATIO.
|
| 179 |
+
#
|
| 180 |
+
# Similarly, adding blocks is very cheap. Each added block will be appended to a
|
| 181 |
+
# list, with concatenation of the underlying data delayed until the next batch
|
| 182 |
+
# compaction.
|
| 183 |
+
|
| 184 |
+
def __init__(
|
| 185 |
+
self,
|
| 186 |
+
batch_size: Optional[int],
|
| 187 |
+
shuffle_buffer_min_size: int,
|
| 188 |
+
shuffle_seed: Optional[int] = None,
|
| 189 |
+
):
|
| 190 |
+
"""Constructs a random-shuffling block batcher.
|
| 191 |
+
|
| 192 |
+
Args:
|
| 193 |
+
batch_size: Record batch size.
|
| 194 |
+
shuffle_buffer_min_size: Minimum number of rows that must be in the local
|
| 195 |
+
in-memory shuffle buffer in order to yield a batch. When there are no
|
| 196 |
+
more rows to be added to the buffer, the number of rows in the buffer
|
| 197 |
+
*will* decrease below this value while yielding the remaining batches,
|
| 198 |
+
and the final batch may have less than ``batch_size`` rows. Increasing
|
| 199 |
+
this will improve the randomness of the shuffle but may increase the
|
| 200 |
+
latency to the first batch.
|
| 201 |
+
shuffle_seed: The seed to use for the local random shuffle.
|
| 202 |
+
"""
|
| 203 |
+
if batch_size is None:
|
| 204 |
+
raise ValueError("Must specify a batch_size if using a local shuffle.")
|
| 205 |
+
self._batch_size = batch_size
|
| 206 |
+
self._shuffle_seed = shuffle_seed
|
| 207 |
+
if shuffle_buffer_min_size < batch_size:
|
| 208 |
+
# Round it up internally to `batch_size` since our algorithm requires it.
|
| 209 |
+
# This is harmless since it only offers extra randomization.
|
| 210 |
+
shuffle_buffer_min_size = batch_size
|
| 211 |
+
self._buffer_min_size = shuffle_buffer_min_size
|
| 212 |
+
self._builder = DelegatingBlockBuilder()
|
| 213 |
+
self._shuffle_buffer: Block = None
|
| 214 |
+
self._batch_head = 0
|
| 215 |
+
self._done_adding = False
|
| 216 |
+
|
| 217 |
+
def add(self, block: Block):
|
| 218 |
+
"""Add a block to the shuffle buffer.
|
| 219 |
+
|
| 220 |
+
Note empty block is not added to buffer.
|
| 221 |
+
|
| 222 |
+
Args:
|
| 223 |
+
block: Block to add to the shuffle buffer.
|
| 224 |
+
"""
|
| 225 |
+
if BlockAccessor.for_block(block).num_rows() > 0:
|
| 226 |
+
self._builder.add_block(block)
|
| 227 |
+
|
| 228 |
+
def done_adding(self) -> bool:
|
| 229 |
+
"""Indicate to the batcher that no more blocks will be added to the batcher.
|
| 230 |
+
|
| 231 |
+
No more blocks should be added to the batcher after calling this.
|
| 232 |
+
"""
|
| 233 |
+
self._done_adding = True
|
| 234 |
+
|
| 235 |
+
def has_any(self) -> bool:
|
| 236 |
+
"""Whether this batcher has any data."""
|
| 237 |
+
return self._buffer_size() > 0
|
| 238 |
+
|
| 239 |
+
def has_batch(self) -> bool:
|
| 240 |
+
"""Whether this batcher has any batches."""
|
| 241 |
+
buffer_size = self._buffer_size()
|
| 242 |
+
|
| 243 |
+
if not self._done_adding:
|
| 244 |
+
# Delay pulling of batches until the buffer is large enough in order to
|
| 245 |
+
# amortize compaction overhead.
|
| 246 |
+
return self._materialized_buffer_size() >= self._buffer_min_size or (
|
| 247 |
+
buffer_size - self._batch_size
|
| 248 |
+
>= self._buffer_min_size * SHUFFLE_BUFFER_COMPACTION_RATIO
|
| 249 |
+
)
|
| 250 |
+
else:
|
| 251 |
+
return buffer_size >= self._batch_size
|
| 252 |
+
|
| 253 |
+
def _buffer_size(self) -> int:
|
| 254 |
+
"""Return shuffle buffer size."""
|
| 255 |
+
buffer_size = self._builder.num_rows()
|
| 256 |
+
buffer_size += self._materialized_buffer_size()
|
| 257 |
+
return buffer_size
|
| 258 |
+
|
| 259 |
+
def _materialized_buffer_size(self) -> int:
|
| 260 |
+
"""Return materialized (compacted portion of) shuffle buffer size."""
|
| 261 |
+
if self._shuffle_buffer is None:
|
| 262 |
+
return 0
|
| 263 |
+
# The size of the concrete (materialized) shuffle buffer, adjusting
|
| 264 |
+
# for the batch head position, which also serves as a counter of the number
|
| 265 |
+
# of already-yielded rows from the current concrete shuffle buffer.
|
| 266 |
+
return max(
|
| 267 |
+
0,
|
| 268 |
+
BlockAccessor.for_block(self._shuffle_buffer).num_rows() - self._batch_head,
|
| 269 |
+
)
|
| 270 |
+
|
| 271 |
+
def next_batch(self) -> Block:
|
| 272 |
+
"""Get the next shuffled batch from the shuffle buffer.
|
| 273 |
+
|
| 274 |
+
Returns:
|
| 275 |
+
A batch represented as a Block.
|
| 276 |
+
"""
|
| 277 |
+
assert self.has_batch() or (self._done_adding and self.has_any())
|
| 278 |
+
# Add rows in the builder to the shuffle buffer. Note that we delay compaction
|
| 279 |
+
# as much as possible to amortize the concatenation overhead. Compaction is
|
| 280 |
+
# only necessary when the materialized buffer size falls below the min size.
|
| 281 |
+
if self._builder.num_rows() > 0 and (
|
| 282 |
+
self._done_adding
|
| 283 |
+
or self._materialized_buffer_size() <= self._buffer_min_size
|
| 284 |
+
):
|
| 285 |
+
if self._shuffle_buffer is not None:
|
| 286 |
+
if self._batch_head > 0:
|
| 287 |
+
# Compact the materialized shuffle buffer.
|
| 288 |
+
block = BlockAccessor.for_block(self._shuffle_buffer)
|
| 289 |
+
self._shuffle_buffer = block.slice(
|
| 290 |
+
self._batch_head, block.num_rows()
|
| 291 |
+
)
|
| 292 |
+
# Add the unyielded rows from the existing shuffle buffer.
|
| 293 |
+
self._builder.add_block(self._shuffle_buffer)
|
| 294 |
+
# Build the new shuffle buffer.
|
| 295 |
+
self._shuffle_buffer = self._builder.build()
|
| 296 |
+
self._shuffle_buffer = BlockAccessor.for_block(
|
| 297 |
+
self._shuffle_buffer
|
| 298 |
+
).random_shuffle(self._shuffle_seed)
|
| 299 |
+
if self._shuffle_seed is not None:
|
| 300 |
+
self._shuffle_seed += 1
|
| 301 |
+
if (
|
| 302 |
+
isinstance(
|
| 303 |
+
BlockAccessor.for_block(self._shuffle_buffer), ArrowBlockAccessor
|
| 304 |
+
)
|
| 305 |
+
and self._shuffle_buffer.num_columns > 0
|
| 306 |
+
and self._shuffle_buffer.column(0).num_chunks
|
| 307 |
+
>= MIN_NUM_CHUNKS_TO_TRIGGER_COMBINE_CHUNKS
|
| 308 |
+
):
|
| 309 |
+
self._shuffle_buffer = transform_pyarrow.combine_chunks(
|
| 310 |
+
self._shuffle_buffer
|
| 311 |
+
)
|
| 312 |
+
# Reset the builder.
|
| 313 |
+
self._builder = DelegatingBlockBuilder()
|
| 314 |
+
self._batch_head = 0
|
| 315 |
+
|
| 316 |
+
assert self._shuffle_buffer is not None
|
| 317 |
+
buffer_size = BlockAccessor.for_block(self._shuffle_buffer).num_rows()
|
| 318 |
+
# Truncate the batch to the buffer size, if necessary.
|
| 319 |
+
batch_size = min(self._batch_size, buffer_size)
|
| 320 |
+
slice_start = self._batch_head
|
| 321 |
+
self._batch_head += batch_size
|
| 322 |
+
# Yield the shuffled batch.
|
| 323 |
+
return BlockAccessor.for_block(self._shuffle_buffer).slice(
|
| 324 |
+
slice_start, self._batch_head
|
| 325 |
+
)
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/block_builder.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Generic
|
| 2 |
+
|
| 3 |
+
from ray.data.block import Block, BlockAccessor, BlockType, T
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class BlockBuilder(Generic[T]):
|
| 7 |
+
"""A builder class for blocks."""
|
| 8 |
+
|
| 9 |
+
@staticmethod
|
| 10 |
+
def for_block(block: Block) -> "BlockBuilder":
|
| 11 |
+
return BlockAccessor.for_block(block).builder()
|
| 12 |
+
|
| 13 |
+
def add(self, item: T) -> None:
|
| 14 |
+
"""Append a single row to the block being built."""
|
| 15 |
+
raise NotImplementedError
|
| 16 |
+
|
| 17 |
+
def add_block(self, block: Block) -> None:
|
| 18 |
+
"""Append an entire block to the block being built."""
|
| 19 |
+
raise NotImplementedError
|
| 20 |
+
|
| 21 |
+
def will_build_yield_copy(self) -> bool:
|
| 22 |
+
"""Whether building this block will yield a new block copy."""
|
| 23 |
+
raise NotImplementedError
|
| 24 |
+
|
| 25 |
+
def build(self) -> Block:
|
| 26 |
+
"""Build the block."""
|
| 27 |
+
raise NotImplementedError
|
| 28 |
+
|
| 29 |
+
def num_rows(self) -> int:
|
| 30 |
+
"""Return the number of rows added in the block."""
|
| 31 |
+
raise NotImplementedError
|
| 32 |
+
|
| 33 |
+
def get_estimated_memory_usage(self) -> int:
|
| 34 |
+
"""Return the estimated memory usage so far in bytes."""
|
| 35 |
+
raise NotImplementedError
|
| 36 |
+
|
| 37 |
+
def block_type(self) -> BlockType:
|
| 38 |
+
"""Return the block type."""
|
| 39 |
+
raise NotImplementedError
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/block_list.py
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Iterator, List, Tuple
|
| 2 |
+
|
| 3 |
+
from ray.data._internal.memory_tracing import trace_allocation
|
| 4 |
+
from ray.data.block import Block, BlockMetadata
|
| 5 |
+
from ray.types import ObjectRef
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class BlockList:
|
| 9 |
+
"""A list of blocks that may be computed or pending computation.
|
| 10 |
+
|
| 11 |
+
All blocks are known ahead of time
|
| 12 |
+
"""
|
| 13 |
+
|
| 14 |
+
def __init__(
|
| 15 |
+
self,
|
| 16 |
+
blocks: List[ObjectRef[Block]],
|
| 17 |
+
metadata: List[BlockMetadata],
|
| 18 |
+
*,
|
| 19 |
+
owned_by_consumer: bool,
|
| 20 |
+
):
|
| 21 |
+
assert len(blocks) == len(metadata), (blocks, metadata)
|
| 22 |
+
for b in blocks:
|
| 23 |
+
trace_allocation(b, "BlockList.__init__")
|
| 24 |
+
self._blocks: List[ObjectRef[Block]] = blocks
|
| 25 |
+
self._num_blocks = len(self._blocks)
|
| 26 |
+
self._metadata: List[BlockMetadata] = metadata
|
| 27 |
+
# Whether the block list is owned by consuming APIs, and if so it can be
|
| 28 |
+
# eagerly deleted after read by the consumer.
|
| 29 |
+
self._owned_by_consumer = owned_by_consumer
|
| 30 |
+
# This field can be set to indicate the number of estimated output blocks,
|
| 31 |
+
# since each read task may produce multiple output blocks after splitting.
|
| 32 |
+
self._estimated_num_blocks = None
|
| 33 |
+
|
| 34 |
+
def __repr__(self):
|
| 35 |
+
return f"BlockList(owned_by_consumer={self._owned_by_consumer})"
|
| 36 |
+
|
| 37 |
+
def get_metadata(self, fetch_if_missing: bool = False) -> List[BlockMetadata]:
|
| 38 |
+
"""Get the metadata for all blocks."""
|
| 39 |
+
return self._metadata.copy()
|
| 40 |
+
|
| 41 |
+
def copy(self) -> "BlockList":
|
| 42 |
+
"""Perform a shallow copy of this BlockList."""
|
| 43 |
+
return BlockList(
|
| 44 |
+
self._blocks, self._metadata, owned_by_consumer=self._owned_by_consumer
|
| 45 |
+
)
|
| 46 |
+
|
| 47 |
+
def clear(self) -> None:
|
| 48 |
+
"""Erase references to the tasks tracked by the BlockList."""
|
| 49 |
+
self._blocks = None
|
| 50 |
+
|
| 51 |
+
def is_cleared(self) -> bool:
|
| 52 |
+
"""Whether this BlockList has been cleared."""
|
| 53 |
+
return self._blocks is None
|
| 54 |
+
|
| 55 |
+
def _check_if_cleared(self) -> None:
|
| 56 |
+
"""Raise an error if this BlockList has been previously cleared."""
|
| 57 |
+
if self.is_cleared():
|
| 58 |
+
raise ValueError(
|
| 59 |
+
"This Dataset's blocks have been moved, which means that you "
|
| 60 |
+
"can no longer use this Dataset."
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
def get_blocks(self) -> List[ObjectRef[Block]]:
|
| 64 |
+
"""Get list of the blocks of this block list.
|
| 65 |
+
|
| 66 |
+
This blocks on the execution of the tasks generating block outputs.
|
| 67 |
+
The length of this iterator is not known until execution.
|
| 68 |
+
"""
|
| 69 |
+
self._check_if_cleared()
|
| 70 |
+
return list(self._blocks)
|
| 71 |
+
|
| 72 |
+
def get_blocks_with_metadata(self) -> List[Tuple[ObjectRef[Block], BlockMetadata]]:
|
| 73 |
+
"""Bulk version of iter_blocks_with_metadata().
|
| 74 |
+
|
| 75 |
+
Prefer calling this instead of the iter form for performance if you
|
| 76 |
+
don't need lazy evaluation.
|
| 77 |
+
"""
|
| 78 |
+
self.get_blocks()
|
| 79 |
+
return list(self.iter_blocks_with_metadata())
|
| 80 |
+
|
| 81 |
+
def iter_blocks_with_metadata(
|
| 82 |
+
self,
|
| 83 |
+
) -> Iterator[Tuple[ObjectRef[Block], BlockMetadata]]:
|
| 84 |
+
"""Iterate over the blocks along with their runtime metadata.
|
| 85 |
+
|
| 86 |
+
This blocks on the execution of the tasks generating block outputs.
|
| 87 |
+
The length of this iterator is not known until execution.
|
| 88 |
+
"""
|
| 89 |
+
self._check_if_cleared()
|
| 90 |
+
return zip(self._blocks, self._metadata)
|
| 91 |
+
|
| 92 |
+
def initial_num_blocks(self) -> int:
|
| 93 |
+
"""Returns the number of blocks of this BlockList."""
|
| 94 |
+
return self._num_blocks
|
| 95 |
+
|
| 96 |
+
def estimated_num_blocks(self) -> int:
|
| 97 |
+
"""Estimate of number of output blocks, without triggering actual execution."""
|
| 98 |
+
return self._estimated_num_blocks or self._num_blocks
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/compute.py
ADDED
|
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from typing import Any, Callable, Iterable, Optional, TypeVar, Union
|
| 3 |
+
|
| 4 |
+
from ray.data._internal.execution.interfaces import TaskContext
|
| 5 |
+
from ray.data.block import Block, UserDefinedFunction
|
| 6 |
+
from ray.util.annotations import DeveloperAPI
|
| 7 |
+
|
| 8 |
+
logger = logging.getLogger(__name__)
|
| 9 |
+
|
| 10 |
+
T = TypeVar("T")
|
| 11 |
+
U = TypeVar("U")
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# Block transform function applied by task and actor pools.
|
| 15 |
+
BlockTransform = Union[
|
| 16 |
+
# TODO(Clark): Once Ray only supports Python 3.8+, use protocol to constrain block
|
| 17 |
+
# transform type.
|
| 18 |
+
# Callable[[Block, ...], Iterable[Block]]
|
| 19 |
+
# Callable[[Block, UserDefinedFunction, ...], Iterable[Block]],
|
| 20 |
+
Callable[[Iterable[Block], TaskContext], Iterable[Block]],
|
| 21 |
+
Callable[[Iterable[Block], TaskContext, UserDefinedFunction], Iterable[Block]],
|
| 22 |
+
Callable[..., Iterable[Block]],
|
| 23 |
+
]
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
@DeveloperAPI
|
| 27 |
+
class ComputeStrategy:
|
| 28 |
+
pass
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
@DeveloperAPI
|
| 32 |
+
class TaskPoolStrategy(ComputeStrategy):
|
| 33 |
+
def __init__(
|
| 34 |
+
self,
|
| 35 |
+
size: Optional[int] = None,
|
| 36 |
+
):
|
| 37 |
+
"""Construct TaskPoolStrategy for a Dataset transform.
|
| 38 |
+
|
| 39 |
+
Args:
|
| 40 |
+
size: Specify the maximum size of the task pool.
|
| 41 |
+
"""
|
| 42 |
+
|
| 43 |
+
if size is not None and size < 1:
|
| 44 |
+
raise ValueError("`size` must be >= 1", size)
|
| 45 |
+
self.size = size
|
| 46 |
+
|
| 47 |
+
def __eq__(self, other: Any) -> bool:
|
| 48 |
+
return (isinstance(other, TaskPoolStrategy) and self.size == other.size) or (
|
| 49 |
+
other == "tasks" and self.size is None
|
| 50 |
+
)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class ActorPoolStrategy(ComputeStrategy):
|
| 54 |
+
"""Specify the compute strategy for a Dataset transform.
|
| 55 |
+
|
| 56 |
+
ActorPoolStrategy specifies that an autoscaling pool of actors should be used
|
| 57 |
+
for a given Dataset transform. This is useful for stateful setup of callable
|
| 58 |
+
classes.
|
| 59 |
+
|
| 60 |
+
For a fixed-sized pool of size ``n``, specify ``compute=ActorPoolStrategy(size=n)``.
|
| 61 |
+
To autoscale from ``m`` to ``n`` actors, specify
|
| 62 |
+
``ActorPoolStrategy(min_size=m, max_size=n)``.
|
| 63 |
+
|
| 64 |
+
To increase opportunities for pipelining task dependency prefetching with
|
| 65 |
+
computation and avoiding actor startup delays, set max_tasks_in_flight_per_actor
|
| 66 |
+
to 2 or greater; to try to decrease the delay due to queueing of tasks on the worker
|
| 67 |
+
actors, set max_tasks_in_flight_per_actor to 1.
|
| 68 |
+
"""
|
| 69 |
+
|
| 70 |
+
def __init__(
|
| 71 |
+
self,
|
| 72 |
+
*,
|
| 73 |
+
size: Optional[int] = None,
|
| 74 |
+
min_size: Optional[int] = None,
|
| 75 |
+
max_size: Optional[int] = None,
|
| 76 |
+
max_tasks_in_flight_per_actor: Optional[int] = None,
|
| 77 |
+
):
|
| 78 |
+
"""Construct ActorPoolStrategy for a Dataset transform.
|
| 79 |
+
|
| 80 |
+
Args:
|
| 81 |
+
size: Specify a fixed size actor pool of this size. It is an error to
|
| 82 |
+
specify both `size` and `min_size` or `max_size`.
|
| 83 |
+
min_size: The minimize size of the actor pool.
|
| 84 |
+
max_size: The maximum size of the actor pool.
|
| 85 |
+
max_tasks_in_flight_per_actor: The maximum number of tasks to concurrently
|
| 86 |
+
send to a single actor worker. Increasing this will increase
|
| 87 |
+
opportunities for pipelining task dependency prefetching with
|
| 88 |
+
computation and avoiding actor startup delays, but will also increase
|
| 89 |
+
queueing delay.
|
| 90 |
+
"""
|
| 91 |
+
if size is not None:
|
| 92 |
+
if size < 1:
|
| 93 |
+
raise ValueError("size must be >= 1", size)
|
| 94 |
+
if max_size is not None or min_size is not None:
|
| 95 |
+
raise ValueError(
|
| 96 |
+
"min_size and max_size cannot be set at the same time as `size`"
|
| 97 |
+
)
|
| 98 |
+
min_size = size
|
| 99 |
+
max_size = size
|
| 100 |
+
if min_size is not None and min_size < 1:
|
| 101 |
+
raise ValueError("min_size must be >= 1", min_size)
|
| 102 |
+
if max_size is not None:
|
| 103 |
+
if min_size is None:
|
| 104 |
+
min_size = 1 # Legacy default.
|
| 105 |
+
if min_size > max_size:
|
| 106 |
+
raise ValueError("min_size must be <= max_size", min_size, max_size)
|
| 107 |
+
if (
|
| 108 |
+
max_tasks_in_flight_per_actor is not None
|
| 109 |
+
and max_tasks_in_flight_per_actor < 1
|
| 110 |
+
):
|
| 111 |
+
raise ValueError(
|
| 112 |
+
"max_tasks_in_flight_per_actor must be >= 1, got: ",
|
| 113 |
+
max_tasks_in_flight_per_actor,
|
| 114 |
+
)
|
| 115 |
+
self.min_size = min_size or 1
|
| 116 |
+
self.max_size = max_size or float("inf")
|
| 117 |
+
self.max_tasks_in_flight_per_actor = max_tasks_in_flight_per_actor
|
| 118 |
+
self.num_workers = 0
|
| 119 |
+
self.ready_to_total_workers_ratio = 0.8
|
| 120 |
+
|
| 121 |
+
def __eq__(self, other: Any) -> bool:
|
| 122 |
+
return isinstance(other, ActorPoolStrategy) and (
|
| 123 |
+
self.min_size == other.min_size
|
| 124 |
+
and self.max_size == other.max_size
|
| 125 |
+
and self.max_tasks_in_flight_per_actor
|
| 126 |
+
== other.max_tasks_in_flight_per_actor
|
| 127 |
+
)
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def get_compute(compute_spec: Union[str, ComputeStrategy]) -> ComputeStrategy:
|
| 131 |
+
if not isinstance(compute_spec, (TaskPoolStrategy, ActorPoolStrategy)):
|
| 132 |
+
raise ValueError(
|
| 133 |
+
"In Ray 2.5, the compute spec must be either "
|
| 134 |
+
f"TaskPoolStrategy or ActorPoolStategy, was: {compute_spec}."
|
| 135 |
+
)
|
| 136 |
+
elif not compute_spec or compute_spec == "tasks":
|
| 137 |
+
return TaskPoolStrategy()
|
| 138 |
+
elif compute_spec == "actors":
|
| 139 |
+
return ActorPoolStrategy()
|
| 140 |
+
elif isinstance(compute_spec, ComputeStrategy):
|
| 141 |
+
return compute_spec
|
| 142 |
+
else:
|
| 143 |
+
raise ValueError("compute must be one of [`tasks`, `actors`, ComputeStrategy]")
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def is_task_compute(compute_spec: Union[str, ComputeStrategy]) -> bool:
|
| 147 |
+
return (
|
| 148 |
+
not compute_spec
|
| 149 |
+
or compute_spec == "tasks"
|
| 150 |
+
or isinstance(compute_spec, TaskPoolStrategy)
|
| 151 |
+
)
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/delegating_block_builder.py
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
from typing import Any, Mapping, Optional
|
| 3 |
+
|
| 4 |
+
from ray.data._internal.arrow_block import ArrowBlockBuilder
|
| 5 |
+
from ray.data._internal.block_builder import BlockBuilder
|
| 6 |
+
from ray.data.block import Block, BlockAccessor, BlockType, DataBatch
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class DelegatingBlockBuilder(BlockBuilder):
|
| 10 |
+
def __init__(self):
|
| 11 |
+
self._builder = None
|
| 12 |
+
self._empty_block = None
|
| 13 |
+
|
| 14 |
+
@property
|
| 15 |
+
def _inferred_block_type(self) -> Optional[BlockType]:
|
| 16 |
+
"""The block type inferred from the first item added to the builder."""
|
| 17 |
+
if self._builder is not None:
|
| 18 |
+
return self._builder.block_type()
|
| 19 |
+
return None
|
| 20 |
+
|
| 21 |
+
def add(self, item: Mapping[str, Any]) -> None:
|
| 22 |
+
assert isinstance(item, collections.abc.Mapping), item
|
| 23 |
+
|
| 24 |
+
if self._builder is None:
|
| 25 |
+
self._builder = ArrowBlockBuilder()
|
| 26 |
+
|
| 27 |
+
self._builder.add(item)
|
| 28 |
+
|
| 29 |
+
def add_batch(self, batch: DataBatch):
|
| 30 |
+
"""Add a user-facing data batch to the builder.
|
| 31 |
+
|
| 32 |
+
This data batch will be converted to an internal block and then added to the
|
| 33 |
+
underlying builder.
|
| 34 |
+
"""
|
| 35 |
+
block = BlockAccessor.batch_to_block(batch, self._inferred_block_type)
|
| 36 |
+
return self.add_block(block)
|
| 37 |
+
|
| 38 |
+
def add_block(self, block: Block):
|
| 39 |
+
accessor = BlockAccessor.for_block(block)
|
| 40 |
+
if accessor.num_rows() == 0:
|
| 41 |
+
# Don't infer types of empty lists. Store the block and use it if no
|
| 42 |
+
# other data is added. https://github.com/ray-project/ray/issues/20290
|
| 43 |
+
self._empty_block = block
|
| 44 |
+
return
|
| 45 |
+
if self._builder is None:
|
| 46 |
+
self._builder = accessor.builder()
|
| 47 |
+
else:
|
| 48 |
+
block_type = accessor.block_type()
|
| 49 |
+
assert block_type == self._inferred_block_type, (
|
| 50 |
+
block_type,
|
| 51 |
+
self._inferred_block_type,
|
| 52 |
+
)
|
| 53 |
+
|
| 54 |
+
self._builder.add_block(accessor.to_block())
|
| 55 |
+
|
| 56 |
+
def will_build_yield_copy(self) -> bool:
|
| 57 |
+
if self._builder is None:
|
| 58 |
+
return True
|
| 59 |
+
return self._builder.will_build_yield_copy()
|
| 60 |
+
|
| 61 |
+
def build(self) -> Block:
|
| 62 |
+
if self._builder is None:
|
| 63 |
+
if self._empty_block is not None:
|
| 64 |
+
self._builder = BlockAccessor.for_block(self._empty_block).builder()
|
| 65 |
+
self._builder.add_block(self._empty_block)
|
| 66 |
+
else:
|
| 67 |
+
self._builder = ArrowBlockBuilder()
|
| 68 |
+
return self._builder.build()
|
| 69 |
+
|
| 70 |
+
def num_rows(self) -> int:
|
| 71 |
+
return self._builder.num_rows() if self._builder is not None else 0
|
| 72 |
+
|
| 73 |
+
def get_estimated_memory_usage(self) -> int:
|
| 74 |
+
if self._builder is None:
|
| 75 |
+
return 0
|
| 76 |
+
return self._builder.get_estimated_memory_usage()
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/equalize.py
ADDED
|
@@ -0,0 +1,142 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List, Tuple
|
| 2 |
+
|
| 3 |
+
from ray.data._internal.execution.interfaces import RefBundle
|
| 4 |
+
from ray.data._internal.split import _calculate_blocks_rows, _split_at_indices
|
| 5 |
+
from ray.data.block import Block, BlockMetadata, BlockPartition
|
| 6 |
+
from ray.types import ObjectRef
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def _equalize(
|
| 10 |
+
per_split_bundles: List[RefBundle],
|
| 11 |
+
owned_by_consumer: bool,
|
| 12 |
+
) -> List[RefBundle]:
|
| 13 |
+
"""Equalize split ref bundles into equal number of rows.
|
| 14 |
+
|
| 15 |
+
Args:
|
| 16 |
+
per_split_bundles: ref bundles to equalize.
|
| 17 |
+
Returns:
|
| 18 |
+
the equalized ref bundles.
|
| 19 |
+
"""
|
| 20 |
+
if len(per_split_bundles) == 0:
|
| 21 |
+
return per_split_bundles
|
| 22 |
+
per_split_blocks_with_metadata = [bundle.blocks for bundle in per_split_bundles]
|
| 23 |
+
per_split_num_rows: List[List[int]] = [
|
| 24 |
+
_calculate_blocks_rows(split) for split in per_split_blocks_with_metadata
|
| 25 |
+
]
|
| 26 |
+
total_rows = sum([sum(blocks_rows) for blocks_rows in per_split_num_rows])
|
| 27 |
+
target_split_size = total_rows // len(per_split_blocks_with_metadata)
|
| 28 |
+
|
| 29 |
+
# phase 1: shave the current splits by dropping blocks (into leftovers)
|
| 30 |
+
# and calculate num rows needed to the meet target.
|
| 31 |
+
shaved_splits, per_split_needed_rows, leftovers = _shave_all_splits(
|
| 32 |
+
per_split_blocks_with_metadata, per_split_num_rows, target_split_size
|
| 33 |
+
)
|
| 34 |
+
|
| 35 |
+
# validate invariants
|
| 36 |
+
for shaved_split, split_needed_row in zip(shaved_splits, per_split_needed_rows):
|
| 37 |
+
num_shaved_rows = sum([meta.num_rows for _, meta in shaved_split])
|
| 38 |
+
assert num_shaved_rows <= target_split_size
|
| 39 |
+
assert num_shaved_rows + split_needed_row == target_split_size
|
| 40 |
+
|
| 41 |
+
# phase 2: based on the num rows needed for each shaved split, split the leftovers
|
| 42 |
+
# in the shape that exactly matches the rows needed.
|
| 43 |
+
leftover_bundle = RefBundle(leftovers, owns_blocks=owned_by_consumer)
|
| 44 |
+
leftover_splits = _split_leftovers(leftover_bundle, per_split_needed_rows)
|
| 45 |
+
|
| 46 |
+
# phase 3: merge the shaved_splits and leftoever splits and return.
|
| 47 |
+
for i, leftover_split in enumerate(leftover_splits):
|
| 48 |
+
shaved_splits[i].extend(leftover_split)
|
| 49 |
+
|
| 50 |
+
# validate invariants.
|
| 51 |
+
num_shaved_rows = sum([meta.num_rows for _, meta in shaved_splits[i]])
|
| 52 |
+
assert num_shaved_rows == target_split_size
|
| 53 |
+
|
| 54 |
+
# Compose the result back to RefBundle
|
| 55 |
+
equalized_ref_bundles: List[RefBundle] = []
|
| 56 |
+
for split in shaved_splits:
|
| 57 |
+
equalized_ref_bundles.append(RefBundle(split, owns_blocks=owned_by_consumer))
|
| 58 |
+
return equalized_ref_bundles
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def _shave_one_split(
|
| 62 |
+
split: BlockPartition, num_rows_per_block: List[int], target_size: int
|
| 63 |
+
) -> Tuple[BlockPartition, int, BlockPartition]:
|
| 64 |
+
"""Shave a block list to the target size.
|
| 65 |
+
|
| 66 |
+
Args:
|
| 67 |
+
split: the block list to shave.
|
| 68 |
+
num_rows_per_block: num rows for each block in the list.
|
| 69 |
+
target_size: the upper bound target size of the shaved list.
|
| 70 |
+
Returns:
|
| 71 |
+
A tuple of:
|
| 72 |
+
- shaved block list.
|
| 73 |
+
- num of rows needed for the block list to meet the target size.
|
| 74 |
+
- leftover blocks.
|
| 75 |
+
|
| 76 |
+
"""
|
| 77 |
+
# iterates through the blocks from the input list and
|
| 78 |
+
shaved = []
|
| 79 |
+
leftovers = []
|
| 80 |
+
shaved_rows = 0
|
| 81 |
+
for block_with_meta, block_rows in zip(split, num_rows_per_block):
|
| 82 |
+
if block_rows + shaved_rows <= target_size:
|
| 83 |
+
shaved.append(block_with_meta)
|
| 84 |
+
shaved_rows += block_rows
|
| 85 |
+
else:
|
| 86 |
+
leftovers.append(block_with_meta)
|
| 87 |
+
num_rows_needed = target_size - shaved_rows
|
| 88 |
+
return shaved, num_rows_needed, leftovers
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def _shave_all_splits(
|
| 92 |
+
input_splits: List[BlockPartition],
|
| 93 |
+
per_split_num_rows: List[List[int]],
|
| 94 |
+
target_size: int,
|
| 95 |
+
) -> Tuple[List[BlockPartition], List[int], BlockPartition]:
|
| 96 |
+
"""Shave all block list to the target size.
|
| 97 |
+
|
| 98 |
+
Args:
|
| 99 |
+
input_splits: all block list to shave.
|
| 100 |
+
input_splits: num rows (per block) for each block list.
|
| 101 |
+
target_size: the upper bound target size of the shaved lists.
|
| 102 |
+
Returns:
|
| 103 |
+
A tuple of:
|
| 104 |
+
- all shaved block list.
|
| 105 |
+
- num of rows needed for the block list to meet the target size.
|
| 106 |
+
- leftover blocks.
|
| 107 |
+
"""
|
| 108 |
+
shaved_splits = []
|
| 109 |
+
per_split_needed_rows = []
|
| 110 |
+
leftovers = []
|
| 111 |
+
|
| 112 |
+
for split, num_rows_per_block in zip(input_splits, per_split_num_rows):
|
| 113 |
+
shaved, num_rows_needed, _leftovers = _shave_one_split(
|
| 114 |
+
split, num_rows_per_block, target_size
|
| 115 |
+
)
|
| 116 |
+
shaved_splits.append(shaved)
|
| 117 |
+
per_split_needed_rows.append(num_rows_needed)
|
| 118 |
+
leftovers.extend(_leftovers)
|
| 119 |
+
|
| 120 |
+
return shaved_splits, per_split_needed_rows, leftovers
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
def _split_leftovers(
    leftovers: RefBundle, per_split_needed_rows: List[int]
) -> List[BlockPartition]:
    """Split leftover blocks by the num of rows needed."""
    num_splits = len(per_split_needed_rows)

    # Convert the per-split row counts into cumulative split indices.
    split_indices: List[int] = []
    running_total = 0
    for needed in per_split_needed_rows:
        running_total += needed
        split_indices.append(running_total)

    split_result: Tuple[
        List[List[ObjectRef[Block]]], List[List[BlockMetadata]]
    ] = _split_at_indices(
        leftovers.blocks,
        split_indices,
        leftovers.owns_blocks,
    )

    # Pair each split's block refs with its metadata, dropping the final
    # remainder partition beyond the requested number of splits.
    blocks_per_split, meta_per_split = split_result
    paired = [
        list(zip(refs, metas)) for refs, metas in zip(blocks_per_split, meta_per_split)
    ]
    return paired[:num_splits]
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/__init__.py
ADDED
|
File without changes
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/autoscaling_requester.py
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import math
|
| 2 |
+
import threading
|
| 3 |
+
import time
|
| 4 |
+
from typing import Dict, List
|
| 5 |
+
|
| 6 |
+
import ray
|
| 7 |
+
from ray.data.context import DataContext
|
| 8 |
+
from ray.util.scheduling_strategies import NodeAffinitySchedulingStrategy
|
| 9 |
+
|
| 10 |
+
# Resource requests are considered stale after this number of seconds, and
# will be purged.
RESOURCE_REQUEST_TIMEOUT = 60
# How often (seconds) the background purge thread runs; twice the request TTL.
PURGE_INTERVAL = RESOURCE_REQUEST_TIMEOUT * 2

# When the autoscaling is driven by memory pressure and there are abundant
# CPUs to support incremental CPUs needed to launch more tasks, we'll translate
# memory pressure into an artificial request of CPUs. The amount of CPUs we'll
# request is ARTIFICIAL_CPU_SCALING_FACTOR * ray.cluster_resources()["CPU"].
ARTIFICIAL_CPU_SCALING_FACTOR = 1.2
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@ray.remote(num_cpus=0, max_restarts=-1, max_task_retries=-1)
class AutoscalingRequester:
    """Actor to make resource requests to autoscaler for the datasets.

    The resource requests are set to timeout after RESOURCE_REQUEST_TIMEOUT seconds.
    For those live requests, we keep track of the last request made for each execution,
    which overrides all previous requests it made; then sum the requested amounts
    across all executions as the final request to the autoscaler.
    """

    def __init__(self):
        # execution_id -> (List[Dict], expiration timestamp)
        self._resource_requests = {}
        # TTL for requests.
        self._timeout = RESOURCE_REQUEST_TIMEOUT

        # Handle to this actor itself, used by the purge thread below to
        # schedule purge work as an actor task.
        self._self_handle = ray.get_runtime_context().current_actor

        # Start a thread to purge expired requests periodically.
        def purge_thread_run():
            while True:
                time.sleep(PURGE_INTERVAL)
                # Call purge_expired_requests() as an actor task,
                # so we don't need to handle multi-threading.
                ray.get(self._self_handle.purge_expired_requests.remote())

        # Daemon thread so it does not block actor/process shutdown.
        self._purge_thread = threading.Thread(target=purge_thread_run, daemon=True)
        self._purge_thread.start()

    def purge_expired_requests(self):
        """Drop stale requests, then refresh the autoscaler with the remainder."""
        self._purge()
        ray.autoscaler.sdk.request_resources(bundles=self._aggregate_requests())

    def request_resources(self, req: List[Dict], execution_id: str):
        """Record ``req`` for ``execution_id`` and forward the aggregate of all
        live requests to the Ray autoscaler."""
        # Purge expired requests before making request to autoscaler.
        self._purge()
        # For the same execution_id, we track the latest resource request and
        # its expiration timestamp.
        self._resource_requests[execution_id] = (
            req,
            time.time() + self._timeout,
        )
        # We aggregate the resource requests across all execution_id's to Ray
        # autoscaler.
        ray.autoscaler.sdk.request_resources(bundles=self._aggregate_requests())

    def _purge(self):
        # Purge requests that are stale. Iterate over a snapshot (list(...))
        # since entries are popped during iteration.
        now = time.time()
        for k, (_, t) in list(self._resource_requests.items()):
            if t < now:
                self._resource_requests.pop(k)

    def _aggregate_requests(self) -> List[Dict]:
        """Concatenate all live per-execution requests into one bundle list,
        padding with artificial 1-CPU bundles when needed (see comment below)."""
        req = []
        for _, (r, _) in self._resource_requests.items():
            req.extend(r)

        def get_cpus(req):
            # Sum the "CPU" field across all requested bundles.
            num_cpus = 0
            for r in req:
                if "CPU" in r:
                    num_cpus += r["CPU"]
            return num_cpus

        # Round up CPUs to exceed total cluster CPUs so it can actually upscale.
        # This is to handle the issue where the autoscaling is driven by memory
        # pressure (rather than CPUs) from streaming executor. In such case, simply
        # asking for incremental CPUs (e.g. 1 CPU for each ready operator) may not
        # actually be able to trigger autoscaling if existing CPUs in cluster can
        # already satisfy the incremental CPUs request.
        num_cpus = get_cpus(req)
        if num_cpus > 0:
            total = ray.cluster_resources()
            if "CPU" in total and num_cpus <= total["CPU"]:
                delta = (
                    math.ceil(ARTIFICIAL_CPU_SCALING_FACTOR * total["CPU"]) - num_cpus
                )
                req.extend([{"CPU": 1}] * delta)

        return req

    def _test_set_timeout(self, ttl):
        """Set the timeout. This is for test only"""
        self._timeout = ttl
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
# Creating/getting an actor from multiple threads is not safe.
# https://github.com/ray-project/ray/issues/41324
# This lock serializes the AutoscalingRequester.options(...).remote() call below.
_autoscaling_requester_lock: threading.RLock = threading.RLock()
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def get_or_create_autoscaling_requester_actor():
    """Get the singleton AutoscalingRequester actor, creating it if needed.

    The actor is named, namespaced, and detached, so all callers in the
    cluster share one instance (``get_if_exists=True`` makes this idempotent).
    """
    # NOTE: the DataContext scheduling strategy is intentionally NOT used here
    # (a previous assignment from DataContext was dead code, immediately
    # overwritten below). The actor is always pinned to the local node so it
    # fate-shares with the driver.
    # Note: for Ray Client, the ray.get_runtime_context().get_node_id() should
    # point to the head node.
    scheduling_strategy = NodeAffinitySchedulingStrategy(
        ray.get_runtime_context().get_node_id(),
        soft=False,
    )
    # Creating/getting an actor from multiple threads is not safe; guard with
    # the module-level lock.
    with _autoscaling_requester_lock:
        return AutoscalingRequester.options(
            name="AutoscalingRequester",
            namespace="AutoscalingRequester",
            get_if_exists=True,
            lifetime="detached",
            scheduling_strategy=scheduling_strategy,
        ).remote()
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/bundle_queue/__init__.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .bundle_queue import BundleQueue
|
| 2 |
+
from .fifo_bundle_queue import FIFOBundleQueue
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
def create_bundle_queue() -> BundleQueue:
    """Create a new bundle queue using the default (FIFO) ordering policy."""
    return FIFOBundleQueue()
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
__all__ = ["BundleQueue", "create_bundle_queue"]
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/bundle_queue/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (454 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/bundle_queue/__pycache__/bundle_queue.cpython-310.pyc
ADDED
|
Binary file (2.37 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/bundle_queue/__pycache__/fifo_bundle_queue.cpython-310.pyc
ADDED
|
Binary file (3.76 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/bundle_queue/bundle_queue.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
from typing import TYPE_CHECKING, Optional
|
| 3 |
+
|
| 4 |
+
if TYPE_CHECKING:
|
| 5 |
+
from ray.data._internal.execution.interfaces import RefBundle
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class BundleQueue(abc.ABC):
    """Abstract interface for a queue of ``RefBundle`` objects.

    Concrete subclasses define the ordering policy (e.g. FIFO).
    """

    @abc.abstractmethod
    def __len__(self) -> int:
        """Return the number of bundles in the queue."""
        ...

    @abc.abstractmethod
    def __contains__(self, bundle: "RefBundle") -> bool:
        """Return whether the bundle is in the queue."""
        ...

    @abc.abstractmethod
    def add(self, bundle: "RefBundle") -> None:
        """Add a bundle to the queue."""
        ...

    @abc.abstractmethod
    def pop(self) -> "RefBundle":
        """Remove and return the head of the queue.

        Raises:
            IndexError: If the queue is empty.
        """
        ...

    @abc.abstractmethod
    def peek(self) -> Optional["RefBundle"]:
        """Return the head of the queue without removing it.

        If the queue is empty, return `None`.
        """
        ...

    @abc.abstractmethod
    def remove(self, bundle: "RefBundle") -> None:
        """Remove a bundle from the queue."""
        ...

    @abc.abstractmethod
    def clear(self) -> None:
        """Remove all bundles from the queue."""
        ...

    @abc.abstractmethod
    def estimate_size_bytes(self) -> int:
        """Return an estimate of the total size of objects in the queue."""
        ...

    @abc.abstractmethod
    def is_empty(self) -> bool:
        """Return whether this queue and all of its internal data structures are empty.

        This method is used for testing.
        """
        ...
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/bundle_queue/fifo_bundle_queue.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from collections import defaultdict, deque
|
| 2 |
+
from dataclasses import dataclass
|
| 3 |
+
from typing import TYPE_CHECKING, Dict, List, Optional
|
| 4 |
+
|
| 5 |
+
from .bundle_queue import BundleQueue
|
| 6 |
+
|
| 7 |
+
if TYPE_CHECKING:
|
| 8 |
+
from ray.data._internal.execution.interfaces import RefBundle
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@dataclass
class _Node:
    """A doubly-linked-list node wrapping one queued bundle."""

    # The queued bundle itself.
    value: "RefBundle"
    # Neighboring nodes; ``None`` at the tail / head of the queue respectively.
    next: Optional["_Node"] = None
    prev: Optional["_Node"] = None
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class FIFOBundleQueue(BundleQueue):
    """A bundle queue that follows a first-in-first-out policy."""

    def __init__(self):
        # We manually implement a linked list because we need to remove elements
        # efficiently, and Python's built-in data structures have O(n) removal time.
        self._head: Optional[_Node] = None
        self._tail: Optional[_Node] = None
        # We use a dictionary to keep track of the nodes corresponding to each bundle.
        # This allows us to remove a bundle from the queue in O(1) time. We need a
        # deque (rather than a single node) because a bundle can be added to the
        # queue multiple times. Nodes in each deque are insertion-ordered.
        self._bundle_to_nodes: Dict["RefBundle", deque[_Node]] = defaultdict(deque)

        # Running total of bundle sizes (bytes) across all queued bundles.
        self._nbytes = 0
        # Number of bundles currently queued (counting duplicates).
        self._num_bundles = 0

    def __len__(self) -> int:
        """Return the number of bundles currently in the queue."""
        return self._num_bundles

    def __contains__(self, bundle: "RefBundle") -> bool:
        """Return whether at least one instance of ``bundle`` is queued."""
        return bundle in self._bundle_to_nodes

    def add(self, bundle: "RefBundle") -> None:
        """Add a bundle to the end (right) of the queue."""
        new_node = _Node(value=bundle, next=None, prev=self._tail)
        # Case 1: The queue is empty.
        if self._head is None:
            assert self._tail is None
            self._head = new_node
            self._tail = new_node
        # Case 2: The queue has at least one element.
        else:
            self._tail.next = new_node
            self._tail = new_node

        self._bundle_to_nodes[bundle].append(new_node)

        self._nbytes += bundle.size_bytes()
        self._num_bundles += 1

    def pop(self) -> "RefBundle":
        """Remove and return the first (left) bundle in the queue.

        Raises:
            IndexError: If the queue is empty.
        """
        # Case 1: The queue is empty.
        if not self._head:
            raise IndexError("You can't pop from an empty queue")

        bundle = self._head.value
        # Delegate to remove(); it pops the oldest node for this bundle,
        # which is exactly the head node.
        self.remove(bundle)

        return bundle

    def peek(self) -> Optional["RefBundle"]:
        """Return the first (left) bundle in the queue without removing it."""
        if self._head is None:
            return None

        return self._head.value

    def remove(self, bundle: "RefBundle"):
        """Remove a bundle from the queue.

        If there are multiple instances of the bundle in the queue, this method only
        removes the first (oldest) one.

        Raises:
            ValueError: If the bundle is not in the queue.
        """
        # Case 1: The bundle was never added (or has already been removed).
        if bundle not in self._bundle_to_nodes:
            raise ValueError(f"The bundle {bundle} is not in the queue.")

        # Oldest node for this bundle; drop the per-bundle entry once empty so
        # ``__contains__`` stays accurate.
        node = self._bundle_to_nodes[bundle].popleft()
        if not self._bundle_to_nodes[bundle]:
            del self._bundle_to_nodes[bundle]

        # Case 2: The bundle is the only element in the queue.
        if self._head is self._tail:
            self._head = None
            self._tail = None
        # Case 3: The bundle is the first element in the queue.
        elif node is self._head:
            self._head = node.next
            self._head.prev = None
        # Case 4: The bundle is the last element in the queue.
        elif node is self._tail:
            self._tail = node.prev
            self._tail.next = None
        # Case 5: The bundle is in the middle of the queue.
        else:
            node.prev.next = node.next
            node.next.prev = node.prev

        self._nbytes -= bundle.size_bytes()
        assert self._nbytes >= 0, (
            "Expected the total size of objects in the queue to be non-negative, but "
            f"got {self._nbytes} bytes instead."
        )

        self._num_bundles -= 1

        return node.value

    def clear(self):
        """Remove all bundles and reset the size/count accounting."""
        self._head = None
        self._tail = None
        self._bundle_to_nodes.clear()
        self._nbytes = 0
        self._num_bundles = 0

    def estimate_size_bytes(self) -> int:
        """Return the tracked total of ``size_bytes()`` across queued bundles."""
        return self._nbytes

    def is_empty(self):
        """Return whether this queue and all of its internal data structures are empty.

        This method is used for testing.
        """
        return not self._bundle_to_nodes and self._head is None and self._tail is None
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/interfaces/__init__.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .common import NodeIdStr
|
| 2 |
+
from .execution_options import ExecutionOptions, ExecutionResources
|
| 3 |
+
from .executor import Executor, OutputIterator
|
| 4 |
+
from .physical_operator import PhysicalOperator
|
| 5 |
+
from .ref_bundle import RefBundle
|
| 6 |
+
from .task_context import TaskContext
|
| 7 |
+
from .transform_fn import AllToAllTransformFn
|
| 8 |
+
|
| 9 |
+
__all__ = [
|
| 10 |
+
"AllToAllTransformFn",
|
| 11 |
+
"ExecutionOptions",
|
| 12 |
+
"ExecutionResources",
|
| 13 |
+
"Executor",
|
| 14 |
+
"NodeIdStr",
|
| 15 |
+
"OutputIterator",
|
| 16 |
+
"PhysicalOperator",
|
| 17 |
+
"RefBundle",
|
| 18 |
+
"TaskContext",
|
| 19 |
+
]
|