Datasets:
Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +3 -0
- material/dataset/Datasets/Electron Microscopy Image Masks/TiO2_Masks_Manual_4connected/1908308_m.tif +3 -0
- material/dataset/Datasets/Electron Microscopy Image Masks/TiO2_Masks_Manual_4connected_4Classes/1908250_cm.tif +3 -0
- material/dataset/Datasets/Electron Microscopy Image Masks/TiO2_Masks_TSEM/Image_Registration/Output/1908272/1908272.tif +3 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/__pycache__/filters.cpython-310.pyc +0 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/_cmp.py +160 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/_cmp.pyi +13 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/_config.py +31 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/_next_gen.py +623 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/_version_info.py +86 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/converters.pyi +19 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/setters.py +79 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/setters.pyi +20 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/validators.pyi +86 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/grpcio-1.71.0.dist-info/INSTALLER +1 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/grpcio-1.71.0.dist-info/LICENSE +610 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/grpcio-1.71.0.dist-info/METADATA +129 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/grpcio-1.71.0.dist-info/RECORD +120 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/grpcio-1.71.0.dist-info/WHEEL +6 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/grpcio-1.71.0.dist-info/top_level.txt +1 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/_models.py +1277 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/_status_codes.py +162 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/_urlparse.py +527 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/__init__.cpython-310.pyc +0 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/cache.cpython-310.pyc +0 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/common.cpython-310.pyc +0 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/file_io.cpython-310.pyc +0 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/parser_utils.cpython-310.pyc +0 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/settings.cpython-310.pyc +0 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/utils.cpython-310.pyc +0 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/classes.py +895 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/completion.py +696 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/completion_cache.py +31 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/environment.py +480 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/exceptions.py +31 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/helpers.py +522 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/project.py +448 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/refactoring/__init__.py +264 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/replstartup.py +29 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/strings.py +111 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/_cloudpickle_wrapper.py +19 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/_memmapping_reducer.py +657 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/_parallel_backends.py +649 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/_store_backends.py +474 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/_utils.py +83 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/backports.py +177 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/executor.py +117 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/hashing.py +265 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/logger.py +162 -0
- material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/memory.py +1172 -0
.gitattributes
CHANGED
|
@@ -501,3 +501,6 @@ material/dataset/Datasets/Electron[[:space:]]Microscopy[[:space:]]Image[[:space:
|
|
| 501 |
material/dataset/Datasets/Electron[[:space:]]Microscopy[[:space:]]Image[[:space:]]Masks/TiO2_Masks_Manual_4connected_4Classes/1908270_cm.tif filter=lfs diff=lfs merge=lfs -text
|
| 502 |
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/h5py.libs/libhdf5_hl-a45b8ce6.so.310.0.6 filter=lfs diff=lfs merge=lfs -text
|
| 503 |
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/wrapt/_wrappers.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
| 501 |
material/dataset/Datasets/Electron[[:space:]]Microscopy[[:space:]]Image[[:space:]]Masks/TiO2_Masks_Manual_4connected_4Classes/1908270_cm.tif filter=lfs diff=lfs merge=lfs -text
|
| 502 |
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/h5py.libs/libhdf5_hl-a45b8ce6.so.310.0.6 filter=lfs diff=lfs merge=lfs -text
|
| 503 |
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/wrapt/_wrappers.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 504 |
+
material/dataset/Datasets/Electron[[:space:]]Microscopy[[:space:]]Image[[:space:]]Masks/TiO2_Masks_Manual_4connected_4Classes/1908250_cm.tif filter=lfs diff=lfs merge=lfs -text
|
| 505 |
+
material/dataset/Datasets/Electron[[:space:]]Microscopy[[:space:]]Image[[:space:]]Masks/TiO2_Masks_TSEM/Image_Registration/Output/1908272/1908272.tif filter=lfs diff=lfs merge=lfs -text
|
| 506 |
+
material/dataset/Datasets/Electron[[:space:]]Microscopy[[:space:]]Image[[:space:]]Masks/TiO2_Masks_Manual_4connected/1908308_m.tif filter=lfs diff=lfs merge=lfs -text
|
material/dataset/Datasets/Electron Microscopy Image Masks/TiO2_Masks_Manual_4connected/1908308_m.tif
ADDED
|
|
Git LFS Details
|
material/dataset/Datasets/Electron Microscopy Image Masks/TiO2_Masks_Manual_4connected_4Classes/1908250_cm.tif
ADDED
|
|
Git LFS Details
|
material/dataset/Datasets/Electron Microscopy Image Masks/TiO2_Masks_TSEM/Image_Registration/Output/1908272/1908272.tif
ADDED
|
|
Git LFS Details
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/__pycache__/filters.cpython-310.pyc
ADDED
|
Binary file (2.3 kB). View file
|
|
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/_cmp.py
ADDED
|
@@ -0,0 +1,160 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SPDX-License-Identifier: MIT
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
import functools
|
| 5 |
+
import types
|
| 6 |
+
|
| 7 |
+
from ._make import __ne__
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def cmp_using(
|
| 14 |
+
eq=None,
|
| 15 |
+
lt=None,
|
| 16 |
+
le=None,
|
| 17 |
+
gt=None,
|
| 18 |
+
ge=None,
|
| 19 |
+
require_same_type=True,
|
| 20 |
+
class_name="Comparable",
|
| 21 |
+
):
|
| 22 |
+
"""
|
| 23 |
+
Create a class that can be passed into `attrs.field`'s ``eq``, ``order``,
|
| 24 |
+
and ``cmp`` arguments to customize field comparison.
|
| 25 |
+
|
| 26 |
+
The resulting class will have a full set of ordering methods if at least
|
| 27 |
+
one of ``{lt, le, gt, ge}`` and ``eq`` are provided.
|
| 28 |
+
|
| 29 |
+
Args:
|
| 30 |
+
eq (typing.Callable | None):
|
| 31 |
+
Callable used to evaluate equality of two objects.
|
| 32 |
+
|
| 33 |
+
lt (typing.Callable | None):
|
| 34 |
+
Callable used to evaluate whether one object is less than another
|
| 35 |
+
object.
|
| 36 |
+
|
| 37 |
+
le (typing.Callable | None):
|
| 38 |
+
Callable used to evaluate whether one object is less than or equal
|
| 39 |
+
to another object.
|
| 40 |
+
|
| 41 |
+
gt (typing.Callable | None):
|
| 42 |
+
Callable used to evaluate whether one object is greater than
|
| 43 |
+
another object.
|
| 44 |
+
|
| 45 |
+
ge (typing.Callable | None):
|
| 46 |
+
Callable used to evaluate whether one object is greater than or
|
| 47 |
+
equal to another object.
|
| 48 |
+
|
| 49 |
+
require_same_type (bool):
|
| 50 |
+
When `True`, equality and ordering methods will return
|
| 51 |
+
`NotImplemented` if objects are not of the same type.
|
| 52 |
+
|
| 53 |
+
class_name (str | None): Name of class. Defaults to "Comparable".
|
| 54 |
+
|
| 55 |
+
See `comparison` for more details.
|
| 56 |
+
|
| 57 |
+
.. versionadded:: 21.1.0
|
| 58 |
+
"""
|
| 59 |
+
|
| 60 |
+
body = {
|
| 61 |
+
"__slots__": ["value"],
|
| 62 |
+
"__init__": _make_init(),
|
| 63 |
+
"_requirements": [],
|
| 64 |
+
"_is_comparable_to": _is_comparable_to,
|
| 65 |
+
}
|
| 66 |
+
|
| 67 |
+
# Add operations.
|
| 68 |
+
num_order_functions = 0
|
| 69 |
+
has_eq_function = False
|
| 70 |
+
|
| 71 |
+
if eq is not None:
|
| 72 |
+
has_eq_function = True
|
| 73 |
+
body["__eq__"] = _make_operator("eq", eq)
|
| 74 |
+
body["__ne__"] = __ne__
|
| 75 |
+
|
| 76 |
+
if lt is not None:
|
| 77 |
+
num_order_functions += 1
|
| 78 |
+
body["__lt__"] = _make_operator("lt", lt)
|
| 79 |
+
|
| 80 |
+
if le is not None:
|
| 81 |
+
num_order_functions += 1
|
| 82 |
+
body["__le__"] = _make_operator("le", le)
|
| 83 |
+
|
| 84 |
+
if gt is not None:
|
| 85 |
+
num_order_functions += 1
|
| 86 |
+
body["__gt__"] = _make_operator("gt", gt)
|
| 87 |
+
|
| 88 |
+
if ge is not None:
|
| 89 |
+
num_order_functions += 1
|
| 90 |
+
body["__ge__"] = _make_operator("ge", ge)
|
| 91 |
+
|
| 92 |
+
type_ = types.new_class(
|
| 93 |
+
class_name, (object,), {}, lambda ns: ns.update(body)
|
| 94 |
+
)
|
| 95 |
+
|
| 96 |
+
# Add same type requirement.
|
| 97 |
+
if require_same_type:
|
| 98 |
+
type_._requirements.append(_check_same_type)
|
| 99 |
+
|
| 100 |
+
# Add total ordering if at least one operation was defined.
|
| 101 |
+
if 0 < num_order_functions < 4:
|
| 102 |
+
if not has_eq_function:
|
| 103 |
+
# functools.total_ordering requires __eq__ to be defined,
|
| 104 |
+
# so raise early error here to keep a nice stack.
|
| 105 |
+
msg = "eq must be define is order to complete ordering from lt, le, gt, ge."
|
| 106 |
+
raise ValueError(msg)
|
| 107 |
+
type_ = functools.total_ordering(type_)
|
| 108 |
+
|
| 109 |
+
return type_
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def _make_init():
|
| 113 |
+
"""
|
| 114 |
+
Create __init__ method.
|
| 115 |
+
"""
|
| 116 |
+
|
| 117 |
+
def __init__(self, value):
|
| 118 |
+
"""
|
| 119 |
+
Initialize object with *value*.
|
| 120 |
+
"""
|
| 121 |
+
self.value = value
|
| 122 |
+
|
| 123 |
+
return __init__
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def _make_operator(name, func):
|
| 127 |
+
"""
|
| 128 |
+
Create operator method.
|
| 129 |
+
"""
|
| 130 |
+
|
| 131 |
+
def method(self, other):
|
| 132 |
+
if not self._is_comparable_to(other):
|
| 133 |
+
return NotImplemented
|
| 134 |
+
|
| 135 |
+
result = func(self.value, other.value)
|
| 136 |
+
if result is NotImplemented:
|
| 137 |
+
return NotImplemented
|
| 138 |
+
|
| 139 |
+
return result
|
| 140 |
+
|
| 141 |
+
method.__name__ = f"__{name}__"
|
| 142 |
+
method.__doc__ = (
|
| 143 |
+
f"Return a {_operation_names[name]} b. Computed by attrs."
|
| 144 |
+
)
|
| 145 |
+
|
| 146 |
+
return method
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
def _is_comparable_to(self, other):
|
| 150 |
+
"""
|
| 151 |
+
Check whether `other` is comparable to `self`.
|
| 152 |
+
"""
|
| 153 |
+
return all(func(self, other) for func in self._requirements)
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
def _check_same_type(self, other):
|
| 157 |
+
"""
|
| 158 |
+
Return True if *self* and *other* are of the same type, False otherwise.
|
| 159 |
+
"""
|
| 160 |
+
return other.value.__class__ is self.value.__class__
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/_cmp.pyi
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any, Callable
|
| 2 |
+
|
| 3 |
+
_CompareWithType = Callable[[Any, Any], bool]
|
| 4 |
+
|
| 5 |
+
def cmp_using(
|
| 6 |
+
eq: _CompareWithType | None = ...,
|
| 7 |
+
lt: _CompareWithType | None = ...,
|
| 8 |
+
le: _CompareWithType | None = ...,
|
| 9 |
+
gt: _CompareWithType | None = ...,
|
| 10 |
+
ge: _CompareWithType | None = ...,
|
| 11 |
+
require_same_type: bool = ...,
|
| 12 |
+
class_name: str = ...,
|
| 13 |
+
) -> type: ...
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/_config.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SPDX-License-Identifier: MIT
|
| 2 |
+
|
| 3 |
+
__all__ = ["get_run_validators", "set_run_validators"]
|
| 4 |
+
|
| 5 |
+
_run_validators = True
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def set_run_validators(run):
|
| 9 |
+
"""
|
| 10 |
+
Set whether or not validators are run. By default, they are run.
|
| 11 |
+
|
| 12 |
+
.. deprecated:: 21.3.0 It will not be removed, but it also will not be
|
| 13 |
+
moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
|
| 14 |
+
instead.
|
| 15 |
+
"""
|
| 16 |
+
if not isinstance(run, bool):
|
| 17 |
+
msg = "'run' must be bool."
|
| 18 |
+
raise TypeError(msg)
|
| 19 |
+
global _run_validators
|
| 20 |
+
_run_validators = run
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def get_run_validators():
|
| 24 |
+
"""
|
| 25 |
+
Return whether or not validators are run.
|
| 26 |
+
|
| 27 |
+
.. deprecated:: 21.3.0 It will not be removed, but it also will not be
|
| 28 |
+
moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
|
| 29 |
+
instead.
|
| 30 |
+
"""
|
| 31 |
+
return _run_validators
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/_next_gen.py
ADDED
|
@@ -0,0 +1,623 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SPDX-License-Identifier: MIT
|
| 2 |
+
|
| 3 |
+
"""
|
| 4 |
+
These are keyword-only APIs that call `attr.s` and `attr.ib` with different
|
| 5 |
+
default values.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
from functools import partial
|
| 9 |
+
|
| 10 |
+
from . import setters
|
| 11 |
+
from ._funcs import asdict as _asdict
|
| 12 |
+
from ._funcs import astuple as _astuple
|
| 13 |
+
from ._make import (
|
| 14 |
+
_DEFAULT_ON_SETATTR,
|
| 15 |
+
NOTHING,
|
| 16 |
+
_frozen_setattrs,
|
| 17 |
+
attrib,
|
| 18 |
+
attrs,
|
| 19 |
+
)
|
| 20 |
+
from .exceptions import UnannotatedAttributeError
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def define(
|
| 24 |
+
maybe_cls=None,
|
| 25 |
+
*,
|
| 26 |
+
these=None,
|
| 27 |
+
repr=None,
|
| 28 |
+
unsafe_hash=None,
|
| 29 |
+
hash=None,
|
| 30 |
+
init=None,
|
| 31 |
+
slots=True,
|
| 32 |
+
frozen=False,
|
| 33 |
+
weakref_slot=True,
|
| 34 |
+
str=False,
|
| 35 |
+
auto_attribs=None,
|
| 36 |
+
kw_only=False,
|
| 37 |
+
cache_hash=False,
|
| 38 |
+
auto_exc=True,
|
| 39 |
+
eq=None,
|
| 40 |
+
order=False,
|
| 41 |
+
auto_detect=True,
|
| 42 |
+
getstate_setstate=None,
|
| 43 |
+
on_setattr=None,
|
| 44 |
+
field_transformer=None,
|
| 45 |
+
match_args=True,
|
| 46 |
+
):
|
| 47 |
+
r"""
|
| 48 |
+
A class decorator that adds :term:`dunder methods` according to
|
| 49 |
+
:term:`fields <field>` specified using :doc:`type annotations <types>`,
|
| 50 |
+
`field()` calls, or the *these* argument.
|
| 51 |
+
|
| 52 |
+
Since *attrs* patches or replaces an existing class, you cannot use
|
| 53 |
+
`object.__init_subclass__` with *attrs* classes, because it runs too early.
|
| 54 |
+
As a replacement, you can define ``__attrs_init_subclass__`` on your class.
|
| 55 |
+
It will be called by *attrs* classes that subclass it after they're
|
| 56 |
+
created. See also :ref:`init-subclass`.
|
| 57 |
+
|
| 58 |
+
Args:
|
| 59 |
+
slots (bool):
|
| 60 |
+
Create a :term:`slotted class <slotted classes>` that's more
|
| 61 |
+
memory-efficient. Slotted classes are generally superior to the
|
| 62 |
+
default dict classes, but have some gotchas you should know about,
|
| 63 |
+
so we encourage you to read the :term:`glossary entry <slotted
|
| 64 |
+
classes>`.
|
| 65 |
+
|
| 66 |
+
auto_detect (bool):
|
| 67 |
+
Instead of setting the *init*, *repr*, *eq*, and *hash* arguments
|
| 68 |
+
explicitly, assume they are set to True **unless any** of the
|
| 69 |
+
involved methods for one of the arguments is implemented in the
|
| 70 |
+
*current* class (meaning, it is *not* inherited from some base
|
| 71 |
+
class).
|
| 72 |
+
|
| 73 |
+
So, for example by implementing ``__eq__`` on a class yourself,
|
| 74 |
+
*attrs* will deduce ``eq=False`` and will create *neither*
|
| 75 |
+
``__eq__`` *nor* ``__ne__`` (but Python classes come with a
|
| 76 |
+
sensible ``__ne__`` by default, so it *should* be enough to only
|
| 77 |
+
implement ``__eq__`` in most cases).
|
| 78 |
+
|
| 79 |
+
Passing True or False` to *init*, *repr*, *eq*, or *hash*
|
| 80 |
+
overrides whatever *auto_detect* would determine.
|
| 81 |
+
|
| 82 |
+
auto_exc (bool):
|
| 83 |
+
If the class subclasses `BaseException` (which implicitly includes
|
| 84 |
+
any subclass of any exception), the following happens to behave
|
| 85 |
+
like a well-behaved Python exception class:
|
| 86 |
+
|
| 87 |
+
- the values for *eq*, *order*, and *hash* are ignored and the
|
| 88 |
+
instances compare and hash by the instance's ids [#]_ ,
|
| 89 |
+
- all attributes that are either passed into ``__init__`` or have a
|
| 90 |
+
default value are additionally available as a tuple in the
|
| 91 |
+
``args`` attribute,
|
| 92 |
+
- the value of *str* is ignored leaving ``__str__`` to base
|
| 93 |
+
classes.
|
| 94 |
+
|
| 95 |
+
.. [#]
|
| 96 |
+
Note that *attrs* will *not* remove existing implementations of
|
| 97 |
+
``__hash__`` or the equality methods. It just won't add own
|
| 98 |
+
ones.
|
| 99 |
+
|
| 100 |
+
on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
|
| 101 |
+
A callable that is run whenever the user attempts to set an
|
| 102 |
+
attribute (either by assignment like ``i.x = 42`` or by using
|
| 103 |
+
`setattr` like ``setattr(i, "x", 42)``). It receives the same
|
| 104 |
+
arguments as validators: the instance, the attribute that is being
|
| 105 |
+
modified, and the new value.
|
| 106 |
+
|
| 107 |
+
If no exception is raised, the attribute is set to the return value
|
| 108 |
+
of the callable.
|
| 109 |
+
|
| 110 |
+
If a list of callables is passed, they're automatically wrapped in
|
| 111 |
+
an `attrs.setters.pipe`.
|
| 112 |
+
|
| 113 |
+
If left None, the default behavior is to run converters and
|
| 114 |
+
validators whenever an attribute is set.
|
| 115 |
+
|
| 116 |
+
init (bool):
|
| 117 |
+
Create a ``__init__`` method that initializes the *attrs*
|
| 118 |
+
attributes. Leading underscores are stripped for the argument name,
|
| 119 |
+
unless an alias is set on the attribute.
|
| 120 |
+
|
| 121 |
+
.. seealso::
|
| 122 |
+
`init` shows advanced ways to customize the generated
|
| 123 |
+
``__init__`` method, including executing code before and after.
|
| 124 |
+
|
| 125 |
+
repr(bool):
|
| 126 |
+
Create a ``__repr__`` method with a human readable representation
|
| 127 |
+
of *attrs* attributes.
|
| 128 |
+
|
| 129 |
+
str (bool):
|
| 130 |
+
Create a ``__str__`` method that is identical to ``__repr__``. This
|
| 131 |
+
is usually not necessary except for `Exception`\ s.
|
| 132 |
+
|
| 133 |
+
eq (bool | None):
|
| 134 |
+
If True or None (default), add ``__eq__`` and ``__ne__`` methods
|
| 135 |
+
that check two instances for equality.
|
| 136 |
+
|
| 137 |
+
.. seealso::
|
| 138 |
+
`comparison` describes how to customize the comparison behavior
|
| 139 |
+
going as far comparing NumPy arrays.
|
| 140 |
+
|
| 141 |
+
order (bool | None):
|
| 142 |
+
If True, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``
|
| 143 |
+
methods that behave like *eq* above and allow instances to be
|
| 144 |
+
ordered.
|
| 145 |
+
|
| 146 |
+
They compare the instances as if they were tuples of their *attrs*
|
| 147 |
+
attributes if and only if the types of both classes are
|
| 148 |
+
*identical*.
|
| 149 |
+
|
| 150 |
+
If `None` mirror value of *eq*.
|
| 151 |
+
|
| 152 |
+
.. seealso:: `comparison`
|
| 153 |
+
|
| 154 |
+
unsafe_hash (bool | None):
|
| 155 |
+
If None (default), the ``__hash__`` method is generated according
|
| 156 |
+
how *eq* and *frozen* are set.
|
| 157 |
+
|
| 158 |
+
1. If *both* are True, *attrs* will generate a ``__hash__`` for
|
| 159 |
+
you.
|
| 160 |
+
2. If *eq* is True and *frozen* is False, ``__hash__`` will be set
|
| 161 |
+
to None, marking it unhashable (which it is).
|
| 162 |
+
3. If *eq* is False, ``__hash__`` will be left untouched meaning
|
| 163 |
+
the ``__hash__`` method of the base class will be used. If the
|
| 164 |
+
base class is `object`, this means it will fall back to id-based
|
| 165 |
+
hashing.
|
| 166 |
+
|
| 167 |
+
Although not recommended, you can decide for yourself and force
|
| 168 |
+
*attrs* to create one (for example, if the class is immutable even
|
| 169 |
+
though you didn't freeze it programmatically) by passing True or
|
| 170 |
+
not. Both of these cases are rather special and should be used
|
| 171 |
+
carefully.
|
| 172 |
+
|
| 173 |
+
.. seealso::
|
| 174 |
+
|
| 175 |
+
- Our documentation on `hashing`,
|
| 176 |
+
- Python's documentation on `object.__hash__`,
|
| 177 |
+
- and the `GitHub issue that led to the default \ behavior
|
| 178 |
+
<https://github.com/python-attrs/attrs/issues/136>`_ for more
|
| 179 |
+
details.
|
| 180 |
+
|
| 181 |
+
hash (bool | None):
|
| 182 |
+
Deprecated alias for *unsafe_hash*. *unsafe_hash* takes precedence.
|
| 183 |
+
|
| 184 |
+
cache_hash (bool):
|
| 185 |
+
Ensure that the object's hash code is computed only once and stored
|
| 186 |
+
on the object. If this is set to True, hashing must be either
|
| 187 |
+
explicitly or implicitly enabled for this class. If the hash code
|
| 188 |
+
is cached, avoid any reassignments of fields involved in hash code
|
| 189 |
+
computation or mutations of the objects those fields point to after
|
| 190 |
+
object creation. If such changes occur, the behavior of the
|
| 191 |
+
object's hash code is undefined.
|
| 192 |
+
|
| 193 |
+
frozen (bool):
|
| 194 |
+
Make instances immutable after initialization. If someone attempts
|
| 195 |
+
to modify a frozen instance, `attrs.exceptions.FrozenInstanceError`
|
| 196 |
+
is raised.
|
| 197 |
+
|
| 198 |
+
.. note::
|
| 199 |
+
|
| 200 |
+
1. This is achieved by installing a custom ``__setattr__``
|
| 201 |
+
method on your class, so you can't implement your own.
|
| 202 |
+
|
| 203 |
+
2. True immutability is impossible in Python.
|
| 204 |
+
|
| 205 |
+
3. This *does* have a minor a runtime performance `impact
|
| 206 |
+
<how-frozen>` when initializing new instances. In other
|
| 207 |
+
words: ``__init__`` is slightly slower with ``frozen=True``.
|
| 208 |
+
|
| 209 |
+
4. If a class is frozen, you cannot modify ``self`` in
|
| 210 |
+
``__attrs_post_init__`` or a self-written ``__init__``. You
|
| 211 |
+
can circumvent that limitation by using
|
| 212 |
+
``object.__setattr__(self, "attribute_name", value)``.
|
| 213 |
+
|
| 214 |
+
5. Subclasses of a frozen class are frozen too.
|
| 215 |
+
|
| 216 |
+
kw_only (bool):
|
| 217 |
+
Make all attributes keyword-only in the generated ``__init__`` (if
|
| 218 |
+
*init* is False, this parameter is ignored).
|
| 219 |
+
|
| 220 |
+
weakref_slot (bool):
|
| 221 |
+
Make instances weak-referenceable. This has no effect unless
|
| 222 |
+
*slots* is True.
|
| 223 |
+
|
| 224 |
+
field_transformer (~typing.Callable | None):
|
| 225 |
+
A function that is called with the original class object and all
|
| 226 |
+
fields right before *attrs* finalizes the class. You can use this,
|
| 227 |
+
for example, to automatically add converters or validators to
|
| 228 |
+
fields based on their types.
|
| 229 |
+
|
| 230 |
+
.. seealso:: `transform-fields`
|
| 231 |
+
|
| 232 |
+
match_args (bool):
|
| 233 |
+
If True (default), set ``__match_args__`` on the class to support
|
| 234 |
+
:pep:`634` (*Structural Pattern Matching*). It is a tuple of all
|
| 235 |
+
non-keyword-only ``__init__`` parameter names on Python 3.10 and
|
| 236 |
+
later. Ignored on older Python versions.
|
| 237 |
+
|
| 238 |
+
collect_by_mro (bool):
|
| 239 |
+
If True, *attrs* collects attributes from base classes correctly
|
| 240 |
+
according to the `method resolution order
|
| 241 |
+
<https://docs.python.org/3/howto/mro.html>`_. If False, *attrs*
|
| 242 |
+
will mimic the (wrong) behavior of `dataclasses` and :pep:`681`.
|
| 243 |
+
|
| 244 |
+
See also `issue #428
|
| 245 |
+
<https://github.com/python-attrs/attrs/issues/428>`_.
|
| 246 |
+
|
| 247 |
+
getstate_setstate (bool | None):
|
| 248 |
+
.. note::
|
| 249 |
+
|
| 250 |
+
This is usually only interesting for slotted classes and you
|
| 251 |
+
should probably just set *auto_detect* to True.
|
| 252 |
+
|
| 253 |
+
If True, ``__getstate__`` and ``__setstate__`` are generated and
|
| 254 |
+
attached to the class. This is necessary for slotted classes to be
|
| 255 |
+
pickleable. If left None, it's True by default for slotted classes
|
| 256 |
+
and False for dict classes.
|
| 257 |
+
|
| 258 |
+
If *auto_detect* is True, and *getstate_setstate* is left None, and
|
| 259 |
+
**either** ``__getstate__`` or ``__setstate__`` is detected
|
| 260 |
+
directly on the class (meaning: not inherited), it is set to False
|
| 261 |
+
(this is usually what you want).
|
| 262 |
+
|
| 263 |
+
auto_attribs (bool | None):
|
| 264 |
+
If True, look at type annotations to determine which attributes to
|
| 265 |
+
use, like `dataclasses`. If False, it will only look for explicit
|
| 266 |
+
:func:`field` class attributes, like classic *attrs*.
|
| 267 |
+
|
| 268 |
+
If left None, it will guess:
|
| 269 |
+
|
| 270 |
+
1. If any attributes are annotated and no unannotated
|
| 271 |
+
`attrs.field`\ s are found, it assumes *auto_attribs=True*.
|
| 272 |
+
2. Otherwise it assumes *auto_attribs=False* and tries to collect
|
| 273 |
+
`attrs.field`\ s.
|
| 274 |
+
|
| 275 |
+
If *attrs* decides to look at type annotations, **all** fields
|
| 276 |
+
**must** be annotated. If *attrs* encounters a field that is set to
|
| 277 |
+
a :func:`field` / `attr.ib` but lacks a type annotation, an
|
| 278 |
+
`attrs.exceptions.UnannotatedAttributeError` is raised. Use
|
| 279 |
+
``field_name: typing.Any = field(...)`` if you don't want to set a
|
| 280 |
+
type.
|
| 281 |
+
|
| 282 |
+
.. warning::
|
| 283 |
+
|
| 284 |
+
For features that use the attribute name to create decorators
|
| 285 |
+
(for example, :ref:`validators <validators>`), you still *must*
|
| 286 |
+
assign :func:`field` / `attr.ib` to them. Otherwise Python will
|
| 287 |
+
either not find the name or try to use the default value to
|
| 288 |
+
call, for example, ``validator`` on it.
|
| 289 |
+
|
| 290 |
+
Attributes annotated as `typing.ClassVar`, and attributes that are
|
| 291 |
+
neither annotated nor set to an `field()` are **ignored**.
|
| 292 |
+
|
| 293 |
+
these (dict[str, object]):
|
| 294 |
+
A dictionary of name to the (private) return value of `field()`
|
| 295 |
+
mappings. This is useful to avoid the definition of your attributes
|
| 296 |
+
within the class body because you can't (for example, if you want
|
| 297 |
+
to add ``__repr__`` methods to Django models) or don't want to.
|
| 298 |
+
|
| 299 |
+
If *these* is not `None`, *attrs* will *not* search the class body
|
| 300 |
+
for attributes and will *not* remove any attributes from it.
|
| 301 |
+
|
| 302 |
+
The order is deduced from the order of the attributes inside
|
| 303 |
+
*these*.
|
| 304 |
+
|
| 305 |
+
Arguably, this is a rather obscure feature.
|
| 306 |
+
|
| 307 |
+
.. versionadded:: 20.1.0
|
| 308 |
+
.. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
|
| 309 |
+
.. versionadded:: 22.2.0
|
| 310 |
+
*unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
|
| 311 |
+
.. versionchanged:: 24.1.0
|
| 312 |
+
Instances are not compared as tuples of attributes anymore, but using a
|
| 313 |
+
big ``and`` condition. This is faster and has more correct behavior for
|
| 314 |
+
uncomparable values like `math.nan`.
|
| 315 |
+
.. versionadded:: 24.1.0
|
| 316 |
+
If a class has an *inherited* classmethod called
|
| 317 |
+
``__attrs_init_subclass__``, it is executed after the class is created.
|
| 318 |
+
.. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
|
| 319 |
+
.. versionadded:: 24.3.0
|
| 320 |
+
Unless already present, a ``__replace__`` method is automatically
|
| 321 |
+
created for `copy.replace` (Python 3.13+ only).
|
| 322 |
+
|
| 323 |
+
.. note::
|
| 324 |
+
|
| 325 |
+
The main differences to the classic `attr.s` are:
|
| 326 |
+
|
| 327 |
+
- Automatically detect whether or not *auto_attribs* should be `True`
|
| 328 |
+
(c.f. *auto_attribs* parameter).
|
| 329 |
+
- Converters and validators run when attributes are set by default --
|
| 330 |
+
if *frozen* is `False`.
|
| 331 |
+
- *slots=True*
|
| 332 |
+
|
| 333 |
+
Usually, this has only upsides and few visible effects in everyday
|
| 334 |
+
programming. But it *can* lead to some surprising behaviors, so
|
| 335 |
+
please make sure to read :term:`slotted classes`.
|
| 336 |
+
|
| 337 |
+
- *auto_exc=True*
|
| 338 |
+
- *auto_detect=True*
|
| 339 |
+
- *order=False*
|
| 340 |
+
- Some options that were only relevant on Python 2 or were kept around
|
| 341 |
+
for backwards-compatibility have been removed.
|
| 342 |
+
|
| 343 |
+
"""
|
| 344 |
+
|
| 345 |
+
def do_it(cls, auto_attribs):
|
| 346 |
+
return attrs(
|
| 347 |
+
maybe_cls=cls,
|
| 348 |
+
these=these,
|
| 349 |
+
repr=repr,
|
| 350 |
+
hash=hash,
|
| 351 |
+
unsafe_hash=unsafe_hash,
|
| 352 |
+
init=init,
|
| 353 |
+
slots=slots,
|
| 354 |
+
frozen=frozen,
|
| 355 |
+
weakref_slot=weakref_slot,
|
| 356 |
+
str=str,
|
| 357 |
+
auto_attribs=auto_attribs,
|
| 358 |
+
kw_only=kw_only,
|
| 359 |
+
cache_hash=cache_hash,
|
| 360 |
+
auto_exc=auto_exc,
|
| 361 |
+
eq=eq,
|
| 362 |
+
order=order,
|
| 363 |
+
auto_detect=auto_detect,
|
| 364 |
+
collect_by_mro=True,
|
| 365 |
+
getstate_setstate=getstate_setstate,
|
| 366 |
+
on_setattr=on_setattr,
|
| 367 |
+
field_transformer=field_transformer,
|
| 368 |
+
match_args=match_args,
|
| 369 |
+
)
|
| 370 |
+
|
| 371 |
+
def wrap(cls):
|
| 372 |
+
"""
|
| 373 |
+
Making this a wrapper ensures this code runs during class creation.
|
| 374 |
+
|
| 375 |
+
We also ensure that frozen-ness of classes is inherited.
|
| 376 |
+
"""
|
| 377 |
+
nonlocal frozen, on_setattr
|
| 378 |
+
|
| 379 |
+
had_on_setattr = on_setattr not in (None, setters.NO_OP)
|
| 380 |
+
|
| 381 |
+
# By default, mutable classes convert & validate on setattr.
|
| 382 |
+
if frozen is False and on_setattr is None:
|
| 383 |
+
on_setattr = _DEFAULT_ON_SETATTR
|
| 384 |
+
|
| 385 |
+
# However, if we subclass a frozen class, we inherit the immutability
|
| 386 |
+
# and disable on_setattr.
|
| 387 |
+
for base_cls in cls.__bases__:
|
| 388 |
+
if base_cls.__setattr__ is _frozen_setattrs:
|
| 389 |
+
if had_on_setattr:
|
| 390 |
+
msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)."
|
| 391 |
+
raise ValueError(msg)
|
| 392 |
+
|
| 393 |
+
on_setattr = setters.NO_OP
|
| 394 |
+
break
|
| 395 |
+
|
| 396 |
+
if auto_attribs is not None:
|
| 397 |
+
return do_it(cls, auto_attribs)
|
| 398 |
+
|
| 399 |
+
try:
|
| 400 |
+
return do_it(cls, True)
|
| 401 |
+
except UnannotatedAttributeError:
|
| 402 |
+
return do_it(cls, False)
|
| 403 |
+
|
| 404 |
+
# maybe_cls's type depends on the usage of the decorator. It's a class
|
| 405 |
+
# if it's used as `@attrs` but `None` if used as `@attrs()`.
|
| 406 |
+
if maybe_cls is None:
|
| 407 |
+
return wrap
|
| 408 |
+
|
| 409 |
+
return wrap(maybe_cls)
|
| 410 |
+
|
| 411 |
+
|
| 412 |
+
mutable = define
|
| 413 |
+
frozen = partial(define, frozen=True, on_setattr=None)
|
| 414 |
+
|
| 415 |
+
|
| 416 |
+
def field(
|
| 417 |
+
*,
|
| 418 |
+
default=NOTHING,
|
| 419 |
+
validator=None,
|
| 420 |
+
repr=True,
|
| 421 |
+
hash=None,
|
| 422 |
+
init=True,
|
| 423 |
+
metadata=None,
|
| 424 |
+
type=None,
|
| 425 |
+
converter=None,
|
| 426 |
+
factory=None,
|
| 427 |
+
kw_only=False,
|
| 428 |
+
eq=None,
|
| 429 |
+
order=None,
|
| 430 |
+
on_setattr=None,
|
| 431 |
+
alias=None,
|
| 432 |
+
):
|
| 433 |
+
"""
|
| 434 |
+
Create a new :term:`field` / :term:`attribute` on a class.
|
| 435 |
+
|
| 436 |
+
.. warning::
|
| 437 |
+
|
| 438 |
+
Does **nothing** unless the class is also decorated with
|
| 439 |
+
`attrs.define` (or similar)!
|
| 440 |
+
|
| 441 |
+
Args:
|
| 442 |
+
default:
|
| 443 |
+
A value that is used if an *attrs*-generated ``__init__`` is used
|
| 444 |
+
and no value is passed while instantiating or the attribute is
|
| 445 |
+
excluded using ``init=False``.
|
| 446 |
+
|
| 447 |
+
If the value is an instance of `attrs.Factory`, its callable will
|
| 448 |
+
be used to construct a new value (useful for mutable data types
|
| 449 |
+
like lists or dicts).
|
| 450 |
+
|
| 451 |
+
If a default is not set (or set manually to `attrs.NOTHING`), a
|
| 452 |
+
value *must* be supplied when instantiating; otherwise a
|
| 453 |
+
`TypeError` will be raised.
|
| 454 |
+
|
| 455 |
+
.. seealso:: `defaults`
|
| 456 |
+
|
| 457 |
+
factory (~typing.Callable):
|
| 458 |
+
Syntactic sugar for ``default=attr.Factory(factory)``.
|
| 459 |
+
|
| 460 |
+
validator (~typing.Callable | list[~typing.Callable]):
|
| 461 |
+
Callable that is called by *attrs*-generated ``__init__`` methods
|
| 462 |
+
after the instance has been initialized. They receive the
|
| 463 |
+
initialized instance, the :func:`~attrs.Attribute`, and the passed
|
| 464 |
+
value.
|
| 465 |
+
|
| 466 |
+
The return value is *not* inspected so the validator has to throw
|
| 467 |
+
an exception itself.
|
| 468 |
+
|
| 469 |
+
If a `list` is passed, its items are treated as validators and must
|
| 470 |
+
all pass.
|
| 471 |
+
|
| 472 |
+
Validators can be globally disabled and re-enabled using
|
| 473 |
+
`attrs.validators.get_disabled` / `attrs.validators.set_disabled`.
|
| 474 |
+
|
| 475 |
+
The validator can also be set using decorator notation as shown
|
| 476 |
+
below.
|
| 477 |
+
|
| 478 |
+
.. seealso:: :ref:`validators`
|
| 479 |
+
|
| 480 |
+
repr (bool | ~typing.Callable):
|
| 481 |
+
Include this attribute in the generated ``__repr__`` method. If
|
| 482 |
+
True, include the attribute; if False, omit it. By default, the
|
| 483 |
+
built-in ``repr()`` function is used. To override how the attribute
|
| 484 |
+
value is formatted, pass a ``callable`` that takes a single value
|
| 485 |
+
and returns a string. Note that the resulting string is used as-is,
|
| 486 |
+
which means it will be used directly *instead* of calling
|
| 487 |
+
``repr()`` (the default).
|
| 488 |
+
|
| 489 |
+
eq (bool | ~typing.Callable):
|
| 490 |
+
If True (default), include this attribute in the generated
|
| 491 |
+
``__eq__`` and ``__ne__`` methods that check two instances for
|
| 492 |
+
equality. To override how the attribute value is compared, pass a
|
| 493 |
+
callable that takes a single value and returns the value to be
|
| 494 |
+
compared.
|
| 495 |
+
|
| 496 |
+
.. seealso:: `comparison`
|
| 497 |
+
|
| 498 |
+
order (bool | ~typing.Callable):
|
| 499 |
+
If True (default), include this attributes in the generated
|
| 500 |
+
``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To
|
| 501 |
+
override how the attribute value is ordered, pass a callable that
|
| 502 |
+
takes a single value and returns the value to be ordered.
|
| 503 |
+
|
| 504 |
+
.. seealso:: `comparison`
|
| 505 |
+
|
| 506 |
+
hash (bool | None):
|
| 507 |
+
Include this attribute in the generated ``__hash__`` method. If
|
| 508 |
+
None (default), mirror *eq*'s value. This is the correct behavior
|
| 509 |
+
according the Python spec. Setting this value to anything else
|
| 510 |
+
than None is *discouraged*.
|
| 511 |
+
|
| 512 |
+
.. seealso:: `hashing`
|
| 513 |
+
|
| 514 |
+
init (bool):
|
| 515 |
+
Include this attribute in the generated ``__init__`` method.
|
| 516 |
+
|
| 517 |
+
It is possible to set this to False and set a default value. In
|
| 518 |
+
that case this attributed is unconditionally initialized with the
|
| 519 |
+
specified default value or factory.
|
| 520 |
+
|
| 521 |
+
.. seealso:: `init`
|
| 522 |
+
|
| 523 |
+
converter (typing.Callable | Converter):
|
| 524 |
+
A callable that is called by *attrs*-generated ``__init__`` methods
|
| 525 |
+
to convert attribute's value to the desired format.
|
| 526 |
+
|
| 527 |
+
If a vanilla callable is passed, it is given the passed-in value as
|
| 528 |
+
the only positional argument. It is possible to receive additional
|
| 529 |
+
arguments by wrapping the callable in a `Converter`.
|
| 530 |
+
|
| 531 |
+
Either way, the returned value will be used as the new value of the
|
| 532 |
+
attribute. The value is converted before being passed to the
|
| 533 |
+
validator, if any.
|
| 534 |
+
|
| 535 |
+
.. seealso:: :ref:`converters`
|
| 536 |
+
|
| 537 |
+
metadata (dict | None):
|
| 538 |
+
An arbitrary mapping, to be used by third-party code.
|
| 539 |
+
|
| 540 |
+
.. seealso:: `extending-metadata`.
|
| 541 |
+
|
| 542 |
+
type (type):
|
| 543 |
+
The type of the attribute. Nowadays, the preferred method to
|
| 544 |
+
specify the type is using a variable annotation (see :pep:`526`).
|
| 545 |
+
This argument is provided for backwards-compatibility and for usage
|
| 546 |
+
with `make_class`. Regardless of the approach used, the type will
|
| 547 |
+
be stored on ``Attribute.type``.
|
| 548 |
+
|
| 549 |
+
Please note that *attrs* doesn't do anything with this metadata by
|
| 550 |
+
itself. You can use it as part of your own code or for `static type
|
| 551 |
+
checking <types>`.
|
| 552 |
+
|
| 553 |
+
kw_only (bool):
|
| 554 |
+
Make this attribute keyword-only in the generated ``__init__`` (if
|
| 555 |
+
``init`` is False, this parameter is ignored).
|
| 556 |
+
|
| 557 |
+
on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
|
| 558 |
+
Allows to overwrite the *on_setattr* setting from `attr.s`. If left
|
| 559 |
+
None, the *on_setattr* value from `attr.s` is used. Set to
|
| 560 |
+
`attrs.setters.NO_OP` to run **no** `setattr` hooks for this
|
| 561 |
+
attribute -- regardless of the setting in `define()`.
|
| 562 |
+
|
| 563 |
+
alias (str | None):
|
| 564 |
+
Override this attribute's parameter name in the generated
|
| 565 |
+
``__init__`` method. If left None, default to ``name`` stripped
|
| 566 |
+
of leading underscores. See `private-attributes`.
|
| 567 |
+
|
| 568 |
+
.. versionadded:: 20.1.0
|
| 569 |
+
.. versionchanged:: 21.1.0
|
| 570 |
+
*eq*, *order*, and *cmp* also accept a custom callable
|
| 571 |
+
.. versionadded:: 22.2.0 *alias*
|
| 572 |
+
.. versionadded:: 23.1.0
|
| 573 |
+
The *type* parameter has been re-added; mostly for `attrs.make_class`.
|
| 574 |
+
Please note that type checkers ignore this metadata.
|
| 575 |
+
|
| 576 |
+
.. seealso::
|
| 577 |
+
|
| 578 |
+
`attr.ib`
|
| 579 |
+
"""
|
| 580 |
+
return attrib(
|
| 581 |
+
default=default,
|
| 582 |
+
validator=validator,
|
| 583 |
+
repr=repr,
|
| 584 |
+
hash=hash,
|
| 585 |
+
init=init,
|
| 586 |
+
metadata=metadata,
|
| 587 |
+
type=type,
|
| 588 |
+
converter=converter,
|
| 589 |
+
factory=factory,
|
| 590 |
+
kw_only=kw_only,
|
| 591 |
+
eq=eq,
|
| 592 |
+
order=order,
|
| 593 |
+
on_setattr=on_setattr,
|
| 594 |
+
alias=alias,
|
| 595 |
+
)
|
| 596 |
+
|
| 597 |
+
|
| 598 |
+
def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
|
| 599 |
+
"""
|
| 600 |
+
Same as `attr.asdict`, except that collections types are always retained
|
| 601 |
+
and dict is always used as *dict_factory*.
|
| 602 |
+
|
| 603 |
+
.. versionadded:: 21.3.0
|
| 604 |
+
"""
|
| 605 |
+
return _asdict(
|
| 606 |
+
inst=inst,
|
| 607 |
+
recurse=recurse,
|
| 608 |
+
filter=filter,
|
| 609 |
+
value_serializer=value_serializer,
|
| 610 |
+
retain_collection_types=True,
|
| 611 |
+
)
|
| 612 |
+
|
| 613 |
+
|
| 614 |
+
def astuple(inst, *, recurse=True, filter=None):
|
| 615 |
+
"""
|
| 616 |
+
Same as `attr.astuple`, except that collections types are always retained
|
| 617 |
+
and `tuple` is always used as the *tuple_factory*.
|
| 618 |
+
|
| 619 |
+
.. versionadded:: 21.3.0
|
| 620 |
+
"""
|
| 621 |
+
return _astuple(
|
| 622 |
+
inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
|
| 623 |
+
)
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/_version_info.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SPDX-License-Identifier: MIT
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
from functools import total_ordering
|
| 5 |
+
|
| 6 |
+
from ._funcs import astuple
|
| 7 |
+
from ._make import attrib, attrs
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
@total_ordering
|
| 11 |
+
@attrs(eq=False, order=False, slots=True, frozen=True)
|
| 12 |
+
class VersionInfo:
|
| 13 |
+
"""
|
| 14 |
+
A version object that can be compared to tuple of length 1--4:
|
| 15 |
+
|
| 16 |
+
>>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
|
| 17 |
+
True
|
| 18 |
+
>>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
|
| 19 |
+
True
|
| 20 |
+
>>> vi = attr.VersionInfo(19, 2, 0, "final")
|
| 21 |
+
>>> vi < (19, 1, 1)
|
| 22 |
+
False
|
| 23 |
+
>>> vi < (19,)
|
| 24 |
+
False
|
| 25 |
+
>>> vi == (19, 2,)
|
| 26 |
+
True
|
| 27 |
+
>>> vi == (19, 2, 1)
|
| 28 |
+
False
|
| 29 |
+
|
| 30 |
+
.. versionadded:: 19.2
|
| 31 |
+
"""
|
| 32 |
+
|
| 33 |
+
year = attrib(type=int)
|
| 34 |
+
minor = attrib(type=int)
|
| 35 |
+
micro = attrib(type=int)
|
| 36 |
+
releaselevel = attrib(type=str)
|
| 37 |
+
|
| 38 |
+
@classmethod
|
| 39 |
+
def _from_version_string(cls, s):
|
| 40 |
+
"""
|
| 41 |
+
Parse *s* and return a _VersionInfo.
|
| 42 |
+
"""
|
| 43 |
+
v = s.split(".")
|
| 44 |
+
if len(v) == 3:
|
| 45 |
+
v.append("final")
|
| 46 |
+
|
| 47 |
+
return cls(
|
| 48 |
+
year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
|
| 49 |
+
)
|
| 50 |
+
|
| 51 |
+
def _ensure_tuple(self, other):
|
| 52 |
+
"""
|
| 53 |
+
Ensure *other* is a tuple of a valid length.
|
| 54 |
+
|
| 55 |
+
Returns a possibly transformed *other* and ourselves as a tuple of
|
| 56 |
+
the same length as *other*.
|
| 57 |
+
"""
|
| 58 |
+
|
| 59 |
+
if self.__class__ is other.__class__:
|
| 60 |
+
other = astuple(other)
|
| 61 |
+
|
| 62 |
+
if not isinstance(other, tuple):
|
| 63 |
+
raise NotImplementedError
|
| 64 |
+
|
| 65 |
+
if not (1 <= len(other) <= 4):
|
| 66 |
+
raise NotImplementedError
|
| 67 |
+
|
| 68 |
+
return astuple(self)[: len(other)], other
|
| 69 |
+
|
| 70 |
+
def __eq__(self, other):
|
| 71 |
+
try:
|
| 72 |
+
us, them = self._ensure_tuple(other)
|
| 73 |
+
except NotImplementedError:
|
| 74 |
+
return NotImplemented
|
| 75 |
+
|
| 76 |
+
return us == them
|
| 77 |
+
|
| 78 |
+
def __lt__(self, other):
|
| 79 |
+
try:
|
| 80 |
+
us, them = self._ensure_tuple(other)
|
| 81 |
+
except NotImplementedError:
|
| 82 |
+
return NotImplemented
|
| 83 |
+
|
| 84 |
+
# Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
|
| 85 |
+
# have to do anything special with releaselevel for now.
|
| 86 |
+
return us < them
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/converters.pyi
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Callable, Any, overload
|
| 2 |
+
|
| 3 |
+
from attrs import _ConverterType, _CallableConverterType
|
| 4 |
+
|
| 5 |
+
@overload
|
| 6 |
+
def pipe(*validators: _CallableConverterType) -> _CallableConverterType: ...
|
| 7 |
+
@overload
|
| 8 |
+
def pipe(*validators: _ConverterType) -> _ConverterType: ...
|
| 9 |
+
@overload
|
| 10 |
+
def optional(converter: _CallableConverterType) -> _CallableConverterType: ...
|
| 11 |
+
@overload
|
| 12 |
+
def optional(converter: _ConverterType) -> _ConverterType: ...
|
| 13 |
+
@overload
|
| 14 |
+
def default_if_none(default: Any) -> _CallableConverterType: ...
|
| 15 |
+
@overload
|
| 16 |
+
def default_if_none(
|
| 17 |
+
*, factory: Callable[[], Any]
|
| 18 |
+
) -> _CallableConverterType: ...
|
| 19 |
+
def to_bool(val: str | int | bool) -> bool: ...
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/setters.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SPDX-License-Identifier: MIT
|
| 2 |
+
|
| 3 |
+
"""
|
| 4 |
+
Commonly used hooks for on_setattr.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from . import _config
|
| 8 |
+
from .exceptions import FrozenAttributeError
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def pipe(*setters):
|
| 12 |
+
"""
|
| 13 |
+
Run all *setters* and return the return value of the last one.
|
| 14 |
+
|
| 15 |
+
.. versionadded:: 20.1.0
|
| 16 |
+
"""
|
| 17 |
+
|
| 18 |
+
def wrapped_pipe(instance, attrib, new_value):
|
| 19 |
+
rv = new_value
|
| 20 |
+
|
| 21 |
+
for setter in setters:
|
| 22 |
+
rv = setter(instance, attrib, rv)
|
| 23 |
+
|
| 24 |
+
return rv
|
| 25 |
+
|
| 26 |
+
return wrapped_pipe
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def frozen(_, __, ___):
|
| 30 |
+
"""
|
| 31 |
+
Prevent an attribute to be modified.
|
| 32 |
+
|
| 33 |
+
.. versionadded:: 20.1.0
|
| 34 |
+
"""
|
| 35 |
+
raise FrozenAttributeError
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def validate(instance, attrib, new_value):
|
| 39 |
+
"""
|
| 40 |
+
Run *attrib*'s validator on *new_value* if it has one.
|
| 41 |
+
|
| 42 |
+
.. versionadded:: 20.1.0
|
| 43 |
+
"""
|
| 44 |
+
if _config._run_validators is False:
|
| 45 |
+
return new_value
|
| 46 |
+
|
| 47 |
+
v = attrib.validator
|
| 48 |
+
if not v:
|
| 49 |
+
return new_value
|
| 50 |
+
|
| 51 |
+
v(instance, attrib, new_value)
|
| 52 |
+
|
| 53 |
+
return new_value
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def convert(instance, attrib, new_value):
|
| 57 |
+
"""
|
| 58 |
+
Run *attrib*'s converter -- if it has one -- on *new_value* and return the
|
| 59 |
+
result.
|
| 60 |
+
|
| 61 |
+
.. versionadded:: 20.1.0
|
| 62 |
+
"""
|
| 63 |
+
c = attrib.converter
|
| 64 |
+
if c:
|
| 65 |
+
# This can be removed once we drop 3.8 and use attrs.Converter instead.
|
| 66 |
+
from ._make import Converter
|
| 67 |
+
|
| 68 |
+
if not isinstance(c, Converter):
|
| 69 |
+
return c(new_value)
|
| 70 |
+
|
| 71 |
+
return c(new_value, instance, attrib)
|
| 72 |
+
|
| 73 |
+
return new_value
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
|
| 77 |
+
# Sphinx's autodata stopped working, so the docstring is inlined in the API
|
| 78 |
+
# docs.
|
| 79 |
+
NO_OP = object()
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/setters.pyi
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any, NewType, NoReturn, TypeVar
|
| 2 |
+
|
| 3 |
+
from . import Attribute
|
| 4 |
+
from attrs import _OnSetAttrType
|
| 5 |
+
|
| 6 |
+
_T = TypeVar("_T")
|
| 7 |
+
|
| 8 |
+
def frozen(
|
| 9 |
+
instance: Any, attribute: Attribute[Any], new_value: Any
|
| 10 |
+
) -> NoReturn: ...
|
| 11 |
+
def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
|
| 12 |
+
def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
|
| 13 |
+
|
| 14 |
+
# convert is allowed to return Any, because they can be chained using pipe.
|
| 15 |
+
def convert(
|
| 16 |
+
instance: Any, attribute: Attribute[Any], new_value: Any
|
| 17 |
+
) -> Any: ...
|
| 18 |
+
|
| 19 |
+
_NoOpType = NewType("_NoOpType", object)
|
| 20 |
+
NO_OP: _NoOpType
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/attr/validators.pyi
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from types import UnionType
|
| 2 |
+
from typing import (
|
| 3 |
+
Any,
|
| 4 |
+
AnyStr,
|
| 5 |
+
Callable,
|
| 6 |
+
Container,
|
| 7 |
+
ContextManager,
|
| 8 |
+
Iterable,
|
| 9 |
+
Mapping,
|
| 10 |
+
Match,
|
| 11 |
+
Pattern,
|
| 12 |
+
TypeVar,
|
| 13 |
+
overload,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
from attrs import _ValidatorType
|
| 17 |
+
from attrs import _ValidatorArgType
|
| 18 |
+
|
| 19 |
+
_T = TypeVar("_T")
|
| 20 |
+
_T1 = TypeVar("_T1")
|
| 21 |
+
_T2 = TypeVar("_T2")
|
| 22 |
+
_T3 = TypeVar("_T3")
|
| 23 |
+
_I = TypeVar("_I", bound=Iterable)
|
| 24 |
+
_K = TypeVar("_K")
|
| 25 |
+
_V = TypeVar("_V")
|
| 26 |
+
_M = TypeVar("_M", bound=Mapping)
|
| 27 |
+
|
| 28 |
+
def set_disabled(run: bool) -> None: ...
|
| 29 |
+
def get_disabled() -> bool: ...
|
| 30 |
+
def disabled() -> ContextManager[None]: ...
|
| 31 |
+
|
| 32 |
+
# To be more precise on instance_of use some overloads.
|
| 33 |
+
# If there are more than 3 items in the tuple then we fall back to Any
|
| 34 |
+
@overload
|
| 35 |
+
def instance_of(type: type[_T]) -> _ValidatorType[_T]: ...
|
| 36 |
+
@overload
|
| 37 |
+
def instance_of(type: tuple[type[_T]]) -> _ValidatorType[_T]: ...
|
| 38 |
+
@overload
|
| 39 |
+
def instance_of(
|
| 40 |
+
type: tuple[type[_T1], type[_T2]],
|
| 41 |
+
) -> _ValidatorType[_T1 | _T2]: ...
|
| 42 |
+
@overload
|
| 43 |
+
def instance_of(
|
| 44 |
+
type: tuple[type[_T1], type[_T2], type[_T3]],
|
| 45 |
+
) -> _ValidatorType[_T1 | _T2 | _T3]: ...
|
| 46 |
+
@overload
|
| 47 |
+
def instance_of(type: tuple[type, ...]) -> _ValidatorType[Any]: ...
|
| 48 |
+
@overload
|
| 49 |
+
def instance_of(type: UnionType) -> _ValidatorType[Any]: ...
|
| 50 |
+
def optional(
|
| 51 |
+
validator: (
|
| 52 |
+
_ValidatorType[_T]
|
| 53 |
+
| list[_ValidatorType[_T]]
|
| 54 |
+
| tuple[_ValidatorType[_T]]
|
| 55 |
+
),
|
| 56 |
+
) -> _ValidatorType[_T | None]: ...
|
| 57 |
+
def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
|
| 58 |
+
def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
|
| 59 |
+
def matches_re(
|
| 60 |
+
regex: Pattern[AnyStr] | AnyStr,
|
| 61 |
+
flags: int = ...,
|
| 62 |
+
func: Callable[[AnyStr, AnyStr, int], Match[AnyStr] | None] | None = ...,
|
| 63 |
+
) -> _ValidatorType[AnyStr]: ...
|
| 64 |
+
def deep_iterable(
|
| 65 |
+
member_validator: _ValidatorArgType[_T],
|
| 66 |
+
iterable_validator: _ValidatorType[_I] | None = ...,
|
| 67 |
+
) -> _ValidatorType[_I]: ...
|
| 68 |
+
def deep_mapping(
|
| 69 |
+
key_validator: _ValidatorType[_K],
|
| 70 |
+
value_validator: _ValidatorType[_V],
|
| 71 |
+
mapping_validator: _ValidatorType[_M] | None = ...,
|
| 72 |
+
) -> _ValidatorType[_M]: ...
|
| 73 |
+
def is_callable() -> _ValidatorType[_T]: ...
|
| 74 |
+
def lt(val: _T) -> _ValidatorType[_T]: ...
|
| 75 |
+
def le(val: _T) -> _ValidatorType[_T]: ...
|
| 76 |
+
def ge(val: _T) -> _ValidatorType[_T]: ...
|
| 77 |
+
def gt(val: _T) -> _ValidatorType[_T]: ...
|
| 78 |
+
def max_len(length: int) -> _ValidatorType[_T]: ...
|
| 79 |
+
def min_len(length: int) -> _ValidatorType[_T]: ...
|
| 80 |
+
def not_(
|
| 81 |
+
validator: _ValidatorType[_T],
|
| 82 |
+
*,
|
| 83 |
+
msg: str | None = None,
|
| 84 |
+
exc_types: type[Exception] | Iterable[type[Exception]] = ...,
|
| 85 |
+
) -> _ValidatorType[_T]: ...
|
| 86 |
+
def or_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/grpcio-1.71.0.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/grpcio-1.71.0.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,610 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
Apache License
|
| 3 |
+
Version 2.0, January 2004
|
| 4 |
+
http://www.apache.org/licenses/
|
| 5 |
+
|
| 6 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
| 7 |
+
|
| 8 |
+
1. Definitions.
|
| 9 |
+
|
| 10 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
| 11 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
| 12 |
+
|
| 13 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
| 14 |
+
the copyright owner that is granting the License.
|
| 15 |
+
|
| 16 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
| 17 |
+
other entities that control, are controlled by, or are under common
|
| 18 |
+
control with that entity. For the purposes of this definition,
|
| 19 |
+
"control" means (i) the power, direct or indirect, to cause the
|
| 20 |
+
direction or management of such entity, whether by contract or
|
| 21 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
| 22 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
| 23 |
+
|
| 24 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
| 25 |
+
exercising permissions granted by this License.
|
| 26 |
+
|
| 27 |
+
"Source" form shall mean the preferred form for making modifications,
|
| 28 |
+
including but not limited to software source code, documentation
|
| 29 |
+
source, and configuration files.
|
| 30 |
+
|
| 31 |
+
"Object" form shall mean any form resulting from mechanical
|
| 32 |
+
transformation or translation of a Source form, including but
|
| 33 |
+
not limited to compiled object code, generated documentation,
|
| 34 |
+
and conversions to other media types.
|
| 35 |
+
|
| 36 |
+
"Work" shall mean the work of authorship, whether in Source or
|
| 37 |
+
Object form, made available under the License, as indicated by a
|
| 38 |
+
copyright notice that is included in or attached to the work
|
| 39 |
+
(an example is provided in the Appendix below).
|
| 40 |
+
|
| 41 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
| 42 |
+
form, that is based on (or derived from) the Work and for which the
|
| 43 |
+
editorial revisions, annotations, elaborations, or other modifications
|
| 44 |
+
represent, as a whole, an original work of authorship. For the purposes
|
| 45 |
+
of this License, Derivative Works shall not include works that remain
|
| 46 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
| 47 |
+
the Work and Derivative Works thereof.
|
| 48 |
+
|
| 49 |
+
"Contribution" shall mean any work of authorship, including
|
| 50 |
+
the original version of the Work and any modifications or additions
|
| 51 |
+
to that Work or Derivative Works thereof, that is intentionally
|
| 52 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
| 53 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
| 54 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
| 55 |
+
means any form of electronic, verbal, or written communication sent
|
| 56 |
+
to the Licensor or its representatives, including but not limited to
|
| 57 |
+
communication on electronic mailing lists, source code control systems,
|
| 58 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
| 59 |
+
Licensor for the purpose of discussing and improving the Work, but
|
| 60 |
+
excluding communication that is conspicuously marked or otherwise
|
| 61 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
| 62 |
+
|
| 63 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 64 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 65 |
+
subsequently incorporated within the Work.
|
| 66 |
+
|
| 67 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 68 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 69 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 70 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 71 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 72 |
+
Work and such Derivative Works in Source or Object form.
|
| 73 |
+
|
| 74 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 75 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 76 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 77 |
+
(except as stated in this section) patent license to make, have made,
|
| 78 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 79 |
+
where such license applies only to those patent claims licensable
|
| 80 |
+
by such Contributor that are necessarily infringed by their
|
| 81 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 82 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 83 |
+
institute patent litigation against any entity (including a
|
| 84 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 85 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 86 |
+
or contributory patent infringement, then any patent licenses
|
| 87 |
+
granted to You under this License for that Work shall terminate
|
| 88 |
+
as of the date such litigation is filed.
|
| 89 |
+
|
| 90 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 91 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 92 |
+
modifications, and in Source or Object form, provided that You
|
| 93 |
+
meet the following conditions:
|
| 94 |
+
|
| 95 |
+
(a) You must give any other recipients of the Work or
|
| 96 |
+
Derivative Works a copy of this License; and
|
| 97 |
+
|
| 98 |
+
(b) You must cause any modified files to carry prominent notices
|
| 99 |
+
stating that You changed the files; and
|
| 100 |
+
|
| 101 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 102 |
+
that You distribute, all copyright, patent, trademark, and
|
| 103 |
+
attribution notices from the Source form of the Work,
|
| 104 |
+
excluding those notices that do not pertain to any part of
|
| 105 |
+
the Derivative Works; and
|
| 106 |
+
|
| 107 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 108 |
+
distribution, then any Derivative Works that You distribute must
|
| 109 |
+
include a readable copy of the attribution notices contained
|
| 110 |
+
within such NOTICE file, excluding those notices that do not
|
| 111 |
+
pertain to any part of the Derivative Works, in at least one
|
| 112 |
+
of the following places: within a NOTICE text file distributed
|
| 113 |
+
as part of the Derivative Works; within the Source form or
|
| 114 |
+
documentation, if provided along with the Derivative Works; or,
|
| 115 |
+
within a display generated by the Derivative Works, if and
|
| 116 |
+
wherever such third-party notices normally appear. The contents
|
| 117 |
+
of the NOTICE file are for informational purposes only and
|
| 118 |
+
do not modify the License. You may add Your own attribution
|
| 119 |
+
notices within Derivative Works that You distribute, alongside
|
| 120 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 121 |
+
that such additional attribution notices cannot be construed
|
| 122 |
+
as modifying the License.
|
| 123 |
+
|
| 124 |
+
You may add Your own copyright statement to Your modifications and
|
| 125 |
+
may provide additional or different license terms and conditions
|
| 126 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 127 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 128 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 129 |
+
the conditions stated in this License.
|
| 130 |
+
|
| 131 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 132 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 133 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 134 |
+
this License, without any additional terms or conditions.
|
| 135 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 136 |
+
the terms of any separate license agreement you may have executed
|
| 137 |
+
with Licensor regarding such Contributions.
|
| 138 |
+
|
| 139 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 140 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 141 |
+
except as required for reasonable and customary use in describing the
|
| 142 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 143 |
+
|
| 144 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 145 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 146 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 147 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 148 |
+
implied, including, without limitation, any warranties or conditions
|
| 149 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 150 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 151 |
+
appropriateness of using or redistributing the Work and assume any
|
| 152 |
+
risks associated with Your exercise of permissions under this License.
|
| 153 |
+
|
| 154 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 155 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 156 |
+
unless required by applicable law (such as deliberate and grossly
|
| 157 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 158 |
+
liable to You for damages, including any direct, indirect, special,
|
| 159 |
+
incidental, or consequential damages of any character arising as a
|
| 160 |
+
result of this License or out of the use or inability to use the
|
| 161 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 162 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 163 |
+
other commercial damages or losses), even if such Contributor
|
| 164 |
+
has been advised of the possibility of such damages.
|
| 165 |
+
|
| 166 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 167 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 168 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 169 |
+
or other liability obligations and/or rights consistent with this
|
| 170 |
+
License. However, in accepting such obligations, You may act only
|
| 171 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 172 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 173 |
+
defend, and hold each Contributor harmless for any liability
|
| 174 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 175 |
+
of your accepting any such warranty or additional liability.
|
| 176 |
+
|
| 177 |
+
END OF TERMS AND CONDITIONS
|
| 178 |
+
|
| 179 |
+
APPENDIX: How to apply the Apache License to your work.
|
| 180 |
+
|
| 181 |
+
To apply the Apache License to your work, attach the following
|
| 182 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
| 183 |
+
replaced with your own identifying information. (Don't include
|
| 184 |
+
the brackets!) The text should be enclosed in the appropriate
|
| 185 |
+
comment syntax for the file format. We also recommend that a
|
| 186 |
+
file or class name and description of purpose be included on the
|
| 187 |
+
same "printed page" as the copyright notice for easier
|
| 188 |
+
identification within third-party archives.
|
| 189 |
+
|
| 190 |
+
Copyright [yyyy] [name of copyright owner]
|
| 191 |
+
|
| 192 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
| 193 |
+
you may not use this file except in compliance with the License.
|
| 194 |
+
You may obtain a copy of the License at
|
| 195 |
+
|
| 196 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
| 197 |
+
|
| 198 |
+
Unless required by applicable law or agreed to in writing, software
|
| 199 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
| 200 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
| 201 |
+
See the License for the specific language governing permissions and
|
| 202 |
+
limitations under the License.
|
| 203 |
+
|
| 204 |
+
-----------------------------------------------------------
|
| 205 |
+
|
| 206 |
+
BSD 3-Clause License
|
| 207 |
+
|
| 208 |
+
Copyright 2016, Google Inc.
|
| 209 |
+
|
| 210 |
+
Redistribution and use in source and binary forms, with or without
|
| 211 |
+
modification, are permitted provided that the following conditions are met:
|
| 212 |
+
|
| 213 |
+
1. Redistributions of source code must retain the above copyright notice,
|
| 214 |
+
this list of conditions and the following disclaimer.
|
| 215 |
+
|
| 216 |
+
2. Redistributions in binary form must reproduce the above copyright notice,
|
| 217 |
+
this list of conditions and the following disclaimer in the documentation
|
| 218 |
+
and/or other materials provided with the distribution.
|
| 219 |
+
|
| 220 |
+
3. Neither the name of the copyright holder nor the names of its
|
| 221 |
+
contributors may be used to endorse or promote products derived from this
|
| 222 |
+
software without specific prior written permission.
|
| 223 |
+
|
| 224 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
| 225 |
+
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
| 226 |
+
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
| 227 |
+
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
| 228 |
+
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
| 229 |
+
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
| 230 |
+
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
| 231 |
+
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
| 232 |
+
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
| 233 |
+
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
|
| 234 |
+
THE POSSIBILITY OF SUCH DAMAGE.
|
| 235 |
+
|
| 236 |
+
-----------------------------------------------------------
|
| 237 |
+
|
| 238 |
+
Mozilla Public License Version 2.0
|
| 239 |
+
==================================
|
| 240 |
+
|
| 241 |
+
1. Definitions
|
| 242 |
+
--------------
|
| 243 |
+
|
| 244 |
+
1.1. "Contributor"
|
| 245 |
+
means each individual or legal entity that creates, contributes to
|
| 246 |
+
the creation of, or owns Covered Software.
|
| 247 |
+
|
| 248 |
+
1.2. "Contributor Version"
|
| 249 |
+
means the combination of the Contributions of others (if any) used
|
| 250 |
+
by a Contributor and that particular Contributor's Contribution.
|
| 251 |
+
|
| 252 |
+
1.3. "Contribution"
|
| 253 |
+
means Covered Software of a particular Contributor.
|
| 254 |
+
|
| 255 |
+
1.4. "Covered Software"
|
| 256 |
+
means Source Code Form to which the initial Contributor has attached
|
| 257 |
+
the notice in Exhibit A, the Executable Form of such Source Code
|
| 258 |
+
Form, and Modifications of such Source Code Form, in each case
|
| 259 |
+
including portions thereof.
|
| 260 |
+
|
| 261 |
+
1.5. "Incompatible With Secondary Licenses"
|
| 262 |
+
means
|
| 263 |
+
|
| 264 |
+
(a) that the initial Contributor has attached the notice described
|
| 265 |
+
in Exhibit B to the Covered Software; or
|
| 266 |
+
|
| 267 |
+
(b) that the Covered Software was made available under the terms of
|
| 268 |
+
version 1.1 or earlier of the License, but not also under the
|
| 269 |
+
terms of a Secondary License.
|
| 270 |
+
|
| 271 |
+
1.6. "Executable Form"
|
| 272 |
+
means any form of the work other than Source Code Form.
|
| 273 |
+
|
| 274 |
+
1.7. "Larger Work"
|
| 275 |
+
means a work that combines Covered Software with other material, in
|
| 276 |
+
a separate file or files, that is not Covered Software.
|
| 277 |
+
|
| 278 |
+
1.8. "License"
|
| 279 |
+
means this document.
|
| 280 |
+
|
| 281 |
+
1.9. "Licensable"
|
| 282 |
+
means having the right to grant, to the maximum extent possible,
|
| 283 |
+
whether at the time of the initial grant or subsequently, any and
|
| 284 |
+
all of the rights conveyed by this License.
|
| 285 |
+
|
| 286 |
+
1.10. "Modifications"
|
| 287 |
+
means any of the following:
|
| 288 |
+
|
| 289 |
+
(a) any file in Source Code Form that results from an addition to,
|
| 290 |
+
deletion from, or modification of the contents of Covered
|
| 291 |
+
Software; or
|
| 292 |
+
|
| 293 |
+
(b) any new file in Source Code Form that contains any Covered
|
| 294 |
+
Software.
|
| 295 |
+
|
| 296 |
+
1.11. "Patent Claims" of a Contributor
|
| 297 |
+
means any patent claim(s), including without limitation, method,
|
| 298 |
+
process, and apparatus claims, in any patent Licensable by such
|
| 299 |
+
Contributor that would be infringed, but for the grant of the
|
| 300 |
+
License, by the making, using, selling, offering for sale, having
|
| 301 |
+
made, import, or transfer of either its Contributions or its
|
| 302 |
+
Contributor Version.
|
| 303 |
+
|
| 304 |
+
1.12. "Secondary License"
|
| 305 |
+
means either the GNU General Public License, Version 2.0, the GNU
|
| 306 |
+
Lesser General Public License, Version 2.1, the GNU Affero General
|
| 307 |
+
Public License, Version 3.0, or any later versions of those
|
| 308 |
+
licenses.
|
| 309 |
+
|
| 310 |
+
1.13. "Source Code Form"
|
| 311 |
+
means the form of the work preferred for making modifications.
|
| 312 |
+
|
| 313 |
+
1.14. "You" (or "Your")
|
| 314 |
+
means an individual or a legal entity exercising rights under this
|
| 315 |
+
License. For legal entities, "You" includes any entity that
|
| 316 |
+
controls, is controlled by, or is under common control with You. For
|
| 317 |
+
purposes of this definition, "control" means (a) the power, direct
|
| 318 |
+
or indirect, to cause the direction or management of such entity,
|
| 319 |
+
whether by contract or otherwise, or (b) ownership of more than
|
| 320 |
+
fifty percent (50%) of the outstanding shares or beneficial
|
| 321 |
+
ownership of such entity.
|
| 322 |
+
|
| 323 |
+
2. License Grants and Conditions
|
| 324 |
+
--------------------------------
|
| 325 |
+
|
| 326 |
+
2.1. Grants
|
| 327 |
+
|
| 328 |
+
Each Contributor hereby grants You a world-wide, royalty-free,
|
| 329 |
+
non-exclusive license:
|
| 330 |
+
|
| 331 |
+
(a) under intellectual property rights (other than patent or trademark)
|
| 332 |
+
Licensable by such Contributor to use, reproduce, make available,
|
| 333 |
+
modify, display, perform, distribute, and otherwise exploit its
|
| 334 |
+
Contributions, either on an unmodified basis, with Modifications, or
|
| 335 |
+
as part of a Larger Work; and
|
| 336 |
+
|
| 337 |
+
(b) under Patent Claims of such Contributor to make, use, sell, offer
|
| 338 |
+
for sale, have made, import, and otherwise transfer either its
|
| 339 |
+
Contributions or its Contributor Version.
|
| 340 |
+
|
| 341 |
+
2.2. Effective Date
|
| 342 |
+
|
| 343 |
+
The licenses granted in Section 2.1 with respect to any Contribution
|
| 344 |
+
become effective for each Contribution on the date the Contributor first
|
| 345 |
+
distributes such Contribution.
|
| 346 |
+
|
| 347 |
+
2.3. Limitations on Grant Scope
|
| 348 |
+
|
| 349 |
+
The licenses granted in this Section 2 are the only rights granted under
|
| 350 |
+
this License. No additional rights or licenses will be implied from the
|
| 351 |
+
distribution or licensing of Covered Software under this License.
|
| 352 |
+
Notwithstanding Section 2.1(b) above, no patent license is granted by a
|
| 353 |
+
Contributor:
|
| 354 |
+
|
| 355 |
+
(a) for any code that a Contributor has removed from Covered Software;
|
| 356 |
+
or
|
| 357 |
+
|
| 358 |
+
(b) for infringements caused by: (i) Your and any other third party's
|
| 359 |
+
modifications of Covered Software, or (ii) the combination of its
|
| 360 |
+
Contributions with other software (except as part of its Contributor
|
| 361 |
+
Version); or
|
| 362 |
+
|
| 363 |
+
(c) under Patent Claims infringed by Covered Software in the absence of
|
| 364 |
+
its Contributions.
|
| 365 |
+
|
| 366 |
+
This License does not grant any rights in the trademarks, service marks,
|
| 367 |
+
or logos of any Contributor (except as may be necessary to comply with
|
| 368 |
+
the notice requirements in Section 3.4).
|
| 369 |
+
|
| 370 |
+
2.4. Subsequent Licenses
|
| 371 |
+
|
| 372 |
+
No Contributor makes additional grants as a result of Your choice to
|
| 373 |
+
distribute the Covered Software under a subsequent version of this
|
| 374 |
+
License (see Section 10.2) or under the terms of a Secondary License (if
|
| 375 |
+
permitted under the terms of Section 3.3).
|
| 376 |
+
|
| 377 |
+
2.5. Representation
|
| 378 |
+
|
| 379 |
+
Each Contributor represents that the Contributor believes its
|
| 380 |
+
Contributions are its original creation(s) or it has sufficient rights
|
| 381 |
+
to grant the rights to its Contributions conveyed by this License.
|
| 382 |
+
|
| 383 |
+
2.6. Fair Use
|
| 384 |
+
|
| 385 |
+
This License is not intended to limit any rights You have under
|
| 386 |
+
applicable copyright doctrines of fair use, fair dealing, or other
|
| 387 |
+
equivalents.
|
| 388 |
+
|
| 389 |
+
2.7. Conditions
|
| 390 |
+
|
| 391 |
+
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
|
| 392 |
+
in Section 2.1.
|
| 393 |
+
|
| 394 |
+
3. Responsibilities
|
| 395 |
+
-------------------
|
| 396 |
+
|
| 397 |
+
3.1. Distribution of Source Form
|
| 398 |
+
|
| 399 |
+
All distribution of Covered Software in Source Code Form, including any
|
| 400 |
+
Modifications that You create or to which You contribute, must be under
|
| 401 |
+
the terms of this License. You must inform recipients that the Source
|
| 402 |
+
Code Form of the Covered Software is governed by the terms of this
|
| 403 |
+
License, and how they can obtain a copy of this License. You may not
|
| 404 |
+
attempt to alter or restrict the recipients' rights in the Source Code
|
| 405 |
+
Form.
|
| 406 |
+
|
| 407 |
+
3.2. Distribution of Executable Form
|
| 408 |
+
|
| 409 |
+
If You distribute Covered Software in Executable Form then:
|
| 410 |
+
|
| 411 |
+
(a) such Covered Software must also be made available in Source Code
|
| 412 |
+
Form, as described in Section 3.1, and You must inform recipients of
|
| 413 |
+
the Executable Form how they can obtain a copy of such Source Code
|
| 414 |
+
Form by reasonable means in a timely manner, at a charge no more
|
| 415 |
+
than the cost of distribution to the recipient; and
|
| 416 |
+
|
| 417 |
+
(b) You may distribute such Executable Form under the terms of this
|
| 418 |
+
License, or sublicense it under different terms, provided that the
|
| 419 |
+
license for the Executable Form does not attempt to limit or alter
|
| 420 |
+
the recipients' rights in the Source Code Form under this License.
|
| 421 |
+
|
| 422 |
+
3.3. Distribution of a Larger Work
|
| 423 |
+
|
| 424 |
+
You may create and distribute a Larger Work under terms of Your choice,
|
| 425 |
+
provided that You also comply with the requirements of this License for
|
| 426 |
+
the Covered Software. If the Larger Work is a combination of Covered
|
| 427 |
+
Software with a work governed by one or more Secondary Licenses, and the
|
| 428 |
+
Covered Software is not Incompatible With Secondary Licenses, this
|
| 429 |
+
License permits You to additionally distribute such Covered Software
|
| 430 |
+
under the terms of such Secondary License(s), so that the recipient of
|
| 431 |
+
the Larger Work may, at their option, further distribute the Covered
|
| 432 |
+
Software under the terms of either this License or such Secondary
|
| 433 |
+
License(s).
|
| 434 |
+
|
| 435 |
+
3.4. Notices
|
| 436 |
+
|
| 437 |
+
You may not remove or alter the substance of any license notices
|
| 438 |
+
(including copyright notices, patent notices, disclaimers of warranty,
|
| 439 |
+
or limitations of liability) contained within the Source Code Form of
|
| 440 |
+
the Covered Software, except that You may alter any license notices to
|
| 441 |
+
the extent required to remedy known factual inaccuracies.
|
| 442 |
+
|
| 443 |
+
3.5. Application of Additional Terms
|
| 444 |
+
|
| 445 |
+
You may choose to offer, and to charge a fee for, warranty, support,
|
| 446 |
+
indemnity or liability obligations to one or more recipients of Covered
|
| 447 |
+
Software. However, You may do so only on Your own behalf, and not on
|
| 448 |
+
behalf of any Contributor. You must make it absolutely clear that any
|
| 449 |
+
such warranty, support, indemnity, or liability obligation is offered by
|
| 450 |
+
You alone, and You hereby agree to indemnify every Contributor for any
|
| 451 |
+
liability incurred by such Contributor as a result of warranty, support,
|
| 452 |
+
indemnity or liability terms You offer. You may include additional
|
| 453 |
+
disclaimers of warranty and limitations of liability specific to any
|
| 454 |
+
jurisdiction.
|
| 455 |
+
|
| 456 |
+
4. Inability to Comply Due to Statute or Regulation
|
| 457 |
+
---------------------------------------------------
|
| 458 |
+
|
| 459 |
+
If it is impossible for You to comply with any of the terms of this
|
| 460 |
+
License with respect to some or all of the Covered Software due to
|
| 461 |
+
statute, judicial order, or regulation then You must: (a) comply with
|
| 462 |
+
the terms of this License to the maximum extent possible; and (b)
|
| 463 |
+
describe the limitations and the code they affect. Such description must
|
| 464 |
+
be placed in a text file included with all distributions of the Covered
|
| 465 |
+
Software under this License. Except to the extent prohibited by statute
|
| 466 |
+
or regulation, such description must be sufficiently detailed for a
|
| 467 |
+
recipient of ordinary skill to be able to understand it.
|
| 468 |
+
|
| 469 |
+
5. Termination
|
| 470 |
+
--------------
|
| 471 |
+
|
| 472 |
+
5.1. The rights granted under this License will terminate automatically
|
| 473 |
+
if You fail to comply with any of its terms. However, if You become
|
| 474 |
+
compliant, then the rights granted under this License from a particular
|
| 475 |
+
Contributor are reinstated (a) provisionally, unless and until such
|
| 476 |
+
Contributor explicitly and finally terminates Your grants, and (b) on an
|
| 477 |
+
ongoing basis, if such Contributor fails to notify You of the
|
| 478 |
+
non-compliance by some reasonable means prior to 60 days after You have
|
| 479 |
+
come back into compliance. Moreover, Your grants from a particular
|
| 480 |
+
Contributor are reinstated on an ongoing basis if such Contributor
|
| 481 |
+
notifies You of the non-compliance by some reasonable means, this is the
|
| 482 |
+
first time You have received notice of non-compliance with this License
|
| 483 |
+
from such Contributor, and You become compliant prior to 30 days after
|
| 484 |
+
Your receipt of the notice.
|
| 485 |
+
|
| 486 |
+
5.2. If You initiate litigation against any entity by asserting a patent
|
| 487 |
+
infringement claim (excluding declaratory judgment actions,
|
| 488 |
+
counter-claims, and cross-claims) alleging that a Contributor Version
|
| 489 |
+
directly or indirectly infringes any patent, then the rights granted to
|
| 490 |
+
You by any and all Contributors for the Covered Software under Section
|
| 491 |
+
2.1 of this License shall terminate.
|
| 492 |
+
|
| 493 |
+
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
|
| 494 |
+
end user license agreements (excluding distributors and resellers) which
|
| 495 |
+
have been validly granted by You or Your distributors under this License
|
| 496 |
+
prior to termination shall survive termination.
|
| 497 |
+
|
| 498 |
+
************************************************************************
|
| 499 |
+
* *
|
| 500 |
+
* 6. Disclaimer of Warranty *
|
| 501 |
+
* ------------------------- *
|
| 502 |
+
* *
|
| 503 |
+
* Covered Software is provided under this License on an "as is" *
|
| 504 |
+
* basis, without warranty of any kind, either expressed, implied, or *
|
| 505 |
+
* statutory, including, without limitation, warranties that the *
|
| 506 |
+
* Covered Software is free of defects, merchantable, fit for a *
|
| 507 |
+
* particular purpose or non-infringing. The entire risk as to the *
|
| 508 |
+
* quality and performance of the Covered Software is with You. *
|
| 509 |
+
* Should any Covered Software prove defective in any respect, You *
|
| 510 |
+
* (not any Contributor) assume the cost of any necessary servicing, *
|
| 511 |
+
* repair, or correction. This disclaimer of warranty constitutes an *
|
| 512 |
+
* essential part of this License. No use of any Covered Software is *
|
| 513 |
+
* authorized under this License except under this disclaimer. *
|
| 514 |
+
* *
|
| 515 |
+
************************************************************************
|
| 516 |
+
|
| 517 |
+
************************************************************************
|
| 518 |
+
* *
|
| 519 |
+
* 7. Limitation of Liability *
|
| 520 |
+
* -------------------------- *
|
| 521 |
+
* *
|
| 522 |
+
* Under no circumstances and under no legal theory, whether tort *
|
| 523 |
+
* (including negligence), contract, or otherwise, shall any *
|
| 524 |
+
* Contributor, or anyone who distributes Covered Software as *
|
| 525 |
+
* permitted above, be liable to You for any direct, indirect, *
|
| 526 |
+
* special, incidental, or consequential damages of any character *
|
| 527 |
+
* including, without limitation, damages for lost profits, loss of *
|
| 528 |
+
* goodwill, work stoppage, computer failure or malfunction, or any *
|
| 529 |
+
* and all other commercial damages or losses, even if such party *
|
| 530 |
+
* shall have been informed of the possibility of such damages. This *
|
| 531 |
+
* limitation of liability shall not apply to liability for death or *
|
| 532 |
+
* personal injury resulting from such party's negligence to the *
|
| 533 |
+
* extent applicable law prohibits such limitation. Some *
|
| 534 |
+
* jurisdictions do not allow the exclusion or limitation of *
|
| 535 |
+
* incidental or consequential damages, so this exclusion and *
|
| 536 |
+
* limitation may not apply to You. *
|
| 537 |
+
* *
|
| 538 |
+
************************************************************************
|
| 539 |
+
|
| 540 |
+
8. Litigation
|
| 541 |
+
-------------
|
| 542 |
+
|
| 543 |
+
Any litigation relating to this License may be brought only in the
|
| 544 |
+
courts of a jurisdiction where the defendant maintains its principal
|
| 545 |
+
place of business and such litigation shall be governed by laws of that
|
| 546 |
+
jurisdiction, without reference to its conflict-of-law provisions.
|
| 547 |
+
Nothing in this Section shall prevent a party's ability to bring
|
| 548 |
+
cross-claims or counter-claims.
|
| 549 |
+
|
| 550 |
+
9. Miscellaneous
|
| 551 |
+
----------------
|
| 552 |
+
|
| 553 |
+
This License represents the complete agreement concerning the subject
|
| 554 |
+
matter hereof. If any provision of this License is held to be
|
| 555 |
+
unenforceable, such provision shall be reformed only to the extent
|
| 556 |
+
necessary to make it enforceable. Any law or regulation which provides
|
| 557 |
+
that the language of a contract shall be construed against the drafter
|
| 558 |
+
shall not be used to construe this License against a Contributor.
|
| 559 |
+
|
| 560 |
+
10. Versions of the License
|
| 561 |
+
---------------------------
|
| 562 |
+
|
| 563 |
+
10.1. New Versions
|
| 564 |
+
|
| 565 |
+
Mozilla Foundation is the license steward. Except as provided in Section
|
| 566 |
+
10.3, no one other than the license steward has the right to modify or
|
| 567 |
+
publish new versions of this License. Each version will be given a
|
| 568 |
+
distinguishing version number.
|
| 569 |
+
|
| 570 |
+
10.2. Effect of New Versions
|
| 571 |
+
|
| 572 |
+
You may distribute the Covered Software under the terms of the version
|
| 573 |
+
of the License under which You originally received the Covered Software,
|
| 574 |
+
or under the terms of any subsequent version published by the license
|
| 575 |
+
steward.
|
| 576 |
+
|
| 577 |
+
10.3. Modified Versions
|
| 578 |
+
|
| 579 |
+
If you create software not governed by this License, and you want to
|
| 580 |
+
create a new license for such software, you may create and use a
|
| 581 |
+
modified version of this License if you rename the license and remove
|
| 582 |
+
any references to the name of the license steward (except to note that
|
| 583 |
+
such modified license differs from this License).
|
| 584 |
+
|
| 585 |
+
10.4. Distributing Source Code Form that is Incompatible With Secondary
|
| 586 |
+
Licenses
|
| 587 |
+
|
| 588 |
+
If You choose to distribute Source Code Form that is Incompatible With
|
| 589 |
+
Secondary Licenses under the terms of this version of the License, the
|
| 590 |
+
notice described in Exhibit B of this License must be attached.
|
| 591 |
+
|
| 592 |
+
Exhibit A - Source Code Form License Notice
|
| 593 |
+
-------------------------------------------
|
| 594 |
+
|
| 595 |
+
This Source Code Form is subject to the terms of the Mozilla Public
|
| 596 |
+
License, v. 2.0. If a copy of the MPL was not distributed with this
|
| 597 |
+
file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
| 598 |
+
|
| 599 |
+
If it is not possible or desirable to put the notice in a particular
|
| 600 |
+
file, then You may include the notice in a location (such as a LICENSE
|
| 601 |
+
file in a relevant directory) where a recipient would be likely to look
|
| 602 |
+
for such a notice.
|
| 603 |
+
|
| 604 |
+
You may add additional accurate notices of copyright ownership.
|
| 605 |
+
|
| 606 |
+
Exhibit B - "Incompatible With Secondary Licenses" Notice
|
| 607 |
+
---------------------------------------------------------
|
| 608 |
+
|
| 609 |
+
This Source Code Form is "Incompatible With Secondary Licenses", as
|
| 610 |
+
defined by the Mozilla Public License, v. 2.0.
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/grpcio-1.71.0.dist-info/METADATA
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: grpcio
|
| 3 |
+
Version: 1.71.0
|
| 4 |
+
Summary: HTTP/2-based RPC framework
|
| 5 |
+
Home-page: https://grpc.io
|
| 6 |
+
Author: The gRPC Authors
|
| 7 |
+
Author-email: grpc-io@googlegroups.com
|
| 8 |
+
License: Apache License 2.0
|
| 9 |
+
Project-URL: Source Code, https://github.com/grpc/grpc
|
| 10 |
+
Project-URL: Bug Tracker, https://github.com/grpc/grpc/issues
|
| 11 |
+
Project-URL: Documentation, https://grpc.github.io/grpc/python
|
| 12 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 13 |
+
Classifier: Programming Language :: Python
|
| 14 |
+
Classifier: Programming Language :: Python :: 3
|
| 15 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 16 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 20 |
+
Classifier: License :: OSI Approved :: Apache Software License
|
| 21 |
+
Requires-Python: >=3.9
|
| 22 |
+
Description-Content-Type: text/x-rst
|
| 23 |
+
License-File: LICENSE
|
| 24 |
+
Provides-Extra: protobuf
|
| 25 |
+
Requires-Dist: grpcio-tools >=1.71.0 ; extra == 'protobuf'
|
| 26 |
+
|
| 27 |
+
gRPC Python
|
| 28 |
+
===========
|
| 29 |
+
|
| 30 |
+
|compat_check_pypi|
|
| 31 |
+
|
| 32 |
+
Package for gRPC Python.
|
| 33 |
+
|
| 34 |
+
.. |compat_check_pypi| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=grpcio
|
| 35 |
+
:target: https://python-compatibility-tools.appspot.com/one_badge_target?package=grpcio
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
Installation
|
| 39 |
+
------------
|
| 40 |
+
|
| 41 |
+
gRPC Python is available for Linux, macOS, and Windows.
|
| 42 |
+
|
| 43 |
+
Installing From PyPI
|
| 44 |
+
~~~~~~~~~~~~~~~~~~~~
|
| 45 |
+
|
| 46 |
+
If you are installing locally...
|
| 47 |
+
|
| 48 |
+
::
|
| 49 |
+
|
| 50 |
+
$ pip install grpcio
|
| 51 |
+
|
| 52 |
+
Else system wide (on Ubuntu)...
|
| 53 |
+
|
| 54 |
+
::
|
| 55 |
+
|
| 56 |
+
$ sudo pip install grpcio
|
| 57 |
+
|
| 58 |
+
If you're on Windows make sure that you installed the :code:`pip.exe` component
|
| 59 |
+
when you installed Python (if not go back and install it!) then invoke:
|
| 60 |
+
|
| 61 |
+
::
|
| 62 |
+
|
| 63 |
+
$ pip.exe install grpcio
|
| 64 |
+
|
| 65 |
+
Windows users may need to invoke :code:`pip.exe` from a command line ran as
|
| 66 |
+
administrator.
|
| 67 |
+
|
| 68 |
+
n.b. On Windows and on Mac OS X one *must* have a recent release of :code:`pip`
|
| 69 |
+
to retrieve the proper wheel from PyPI. Be sure to upgrade to the latest
|
| 70 |
+
version!
|
| 71 |
+
|
| 72 |
+
Installing From Source
|
| 73 |
+
~~~~~~~~~~~~~~~~~~~~~~
|
| 74 |
+
|
| 75 |
+
Building from source requires that you have the Python headers (usually a
|
| 76 |
+
package named :code:`python-dev`).
|
| 77 |
+
|
| 78 |
+
::
|
| 79 |
+
|
| 80 |
+
$ export REPO_ROOT=grpc # REPO_ROOT can be any directory of your choice
|
| 81 |
+
$ git clone -b RELEASE_TAG_HERE https://github.com/grpc/grpc $REPO_ROOT
|
| 82 |
+
$ cd $REPO_ROOT
|
| 83 |
+
$ git submodule update --init
|
| 84 |
+
|
| 85 |
+
# To include systemd socket-activation feature in the build,
|
| 86 |
+
# first install the `libsystemd-dev` package, then :
|
| 87 |
+
$ export GRPC_PYTHON_BUILD_WITH_SYSTEMD=1
|
| 88 |
+
|
| 89 |
+
# For the next two commands do `sudo pip install` if you get permission-denied errors
|
| 90 |
+
$ pip install -r requirements.txt
|
| 91 |
+
$ GRPC_PYTHON_BUILD_WITH_CYTHON=1 pip install .
|
| 92 |
+
|
| 93 |
+
You cannot currently install Python from source on Windows. Things might work
|
| 94 |
+
out for you in MSYS2 (follow the Linux instructions), but it isn't officially
|
| 95 |
+
supported at the moment.
|
| 96 |
+
|
| 97 |
+
Troubleshooting
|
| 98 |
+
~~~~~~~~~~~~~~~
|
| 99 |
+
|
| 100 |
+
Help, I ...
|
| 101 |
+
|
| 102 |
+
* **... see the following error on some platforms**
|
| 103 |
+
|
| 104 |
+
::
|
| 105 |
+
|
| 106 |
+
/tmp/pip-build-U8pSsr/cython/Cython/Plex/Scanners.c:4:20: fatal error: Python.h: No such file or directory
|
| 107 |
+
#include "Python.h"
|
| 108 |
+
^
|
| 109 |
+
compilation terminated.
|
| 110 |
+
|
| 111 |
+
You can fix it by installing the `python-dev` package, e.g.
|
| 112 |
+
|
| 113 |
+
::
|
| 114 |
+
|
| 115 |
+
sudo apt-get install python-dev
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
Versioning
|
| 119 |
+
~~~~~~~~~~
|
| 120 |
+
|
| 121 |
+
gRPC Python is developed in a monorepo shared with implementations of gRPC in
|
| 122 |
+
other programming languages. While the minor versions are released in
|
| 123 |
+
lock-step with other languages in the repo (e.g. 1.63.0 is guaranteed to exist
|
| 124 |
+
for all languages), patch versions may be specific to only a single
|
| 125 |
+
language. For example, if 1.63.1 is a C++-specific patch, 1.63.1 may not be
|
| 126 |
+
uploaded to PyPI. As a result, it is **not** a good assumption that the latest
|
| 127 |
+
patch for a given minor version on GitHub is also the latest patch for that
|
| 128 |
+
same minor version on PyPI.
|
| 129 |
+
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/grpcio-1.71.0.dist-info/RECORD
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
grpc/__init__.py,sha256=RUN1ZIKXNgRqOEaUbqKJP2sDpn7C1wCcompDQsS_UkA,82333
|
| 2 |
+
grpc/__pycache__/__init__.cpython-310.pyc,,
|
| 3 |
+
grpc/__pycache__/_auth.cpython-310.pyc,,
|
| 4 |
+
grpc/__pycache__/_channel.cpython-310.pyc,,
|
| 5 |
+
grpc/__pycache__/_common.cpython-310.pyc,,
|
| 6 |
+
grpc/__pycache__/_compression.cpython-310.pyc,,
|
| 7 |
+
grpc/__pycache__/_grpcio_metadata.cpython-310.pyc,,
|
| 8 |
+
grpc/__pycache__/_interceptor.cpython-310.pyc,,
|
| 9 |
+
grpc/__pycache__/_observability.cpython-310.pyc,,
|
| 10 |
+
grpc/__pycache__/_plugin_wrapping.cpython-310.pyc,,
|
| 11 |
+
grpc/__pycache__/_runtime_protos.cpython-310.pyc,,
|
| 12 |
+
grpc/__pycache__/_server.cpython-310.pyc,,
|
| 13 |
+
grpc/__pycache__/_simple_stubs.cpython-310.pyc,,
|
| 14 |
+
grpc/__pycache__/_typing.cpython-310.pyc,,
|
| 15 |
+
grpc/__pycache__/_utilities.cpython-310.pyc,,
|
| 16 |
+
grpc/_auth.py,sha256=UNjlkWE4rTsTZlVzJRpgupTJgEJYop-fpTkgJmvien4,2635
|
| 17 |
+
grpc/_channel.py,sha256=sPVbiPQ5BuDx1rgBPlajXi9XH958XCeGGsKXie08Ttw,81346
|
| 18 |
+
grpc/_common.py,sha256=PQdgX83qEG3BUCfXlQVnrv9t5yFD9wjHhrckvX_UkfA,6784
|
| 19 |
+
grpc/_compression.py,sha256=0P9yfNIn33BmcQmOdnMUTogbLIPR0eT_Lmmnm3llcFg,1983
|
| 20 |
+
grpc/_cython/__init__.py,sha256=w3kqSAyaZgP-W0890xR4L4WeBPGrtsQCQJe0FUFR0K0,577
|
| 21 |
+
grpc/_cython/__pycache__/__init__.cpython-310.pyc,,
|
| 22 |
+
grpc/_cython/_credentials/roots.pem,sha256=lhQzRMSuEJWIElssQdXa9lSl-vxuI_rDf3uj0p2n53Y,264440
|
| 23 |
+
grpc/_cython/_cygrpc/__init__.py,sha256=w3kqSAyaZgP-W0890xR4L4WeBPGrtsQCQJe0FUFR0K0,577
|
| 24 |
+
grpc/_cython/_cygrpc/__pycache__/__init__.cpython-310.pyc,,
|
| 25 |
+
grpc/_cython/cygrpc.cpython-310-x86_64-linux-gnu.so,sha256=fHECq94cyFAodZrdm3ZojlnPWRWnwsnlkGHNC-4tYlQ,13677208
|
| 26 |
+
grpc/_grpcio_metadata.py,sha256=hosqFnNPgE2xb-JI6HqOw66DCXylY7jiEVcaGLejuFQ,26
|
| 27 |
+
grpc/_interceptor.py,sha256=5qiM2lQWabbBd8IeR6VHzTJKmhz7R2_Tf3m5Xp3C6Y0,25862
|
| 28 |
+
grpc/_observability.py,sha256=JVe0NZ1uGNwSAt-oo9_KS6k2eYlcA5P1h1id8BsJImA,10417
|
| 29 |
+
grpc/_plugin_wrapping.py,sha256=sbJLmw0reHc0aBaSQuPuFr9hHnzllEVoDYgrkre7bkk,4382
|
| 30 |
+
grpc/_runtime_protos.py,sha256=2JtQGu0T8NlZn_GfzH_lpmTwVJPE1rkB5X9jcE7UXos,5805
|
| 31 |
+
grpc/_server.py,sha256=VXOqgBjwXURqLTnIA1AaxO3VdxkuAhQSLATgsTkMP8E,50885
|
| 32 |
+
grpc/_simple_stubs.py,sha256=2hK8LsbRdWhG8kT5DR1p0gLNr5Q818XYox8Sv__w0ws,24610
|
| 33 |
+
grpc/_typing.py,sha256=-wdtuGJpvR10J6r4LSUYTDeRCWpMsQTo67HwowaVOCk,2758
|
| 34 |
+
grpc/_utilities.py,sha256=befrEhsHGPfSuVPEMTNEVaQCPwTD8tKV8yIrge7I4Vo,7043
|
| 35 |
+
grpc/aio/__init__.py,sha256=80Ho1FolpueFqIIvyl7d5b9FJgvw5ilZgHcXxN1NmUs,3160
|
| 36 |
+
grpc/aio/__pycache__/__init__.cpython-310.pyc,,
|
| 37 |
+
grpc/aio/__pycache__/_base_call.cpython-310.pyc,,
|
| 38 |
+
grpc/aio/__pycache__/_base_channel.cpython-310.pyc,,
|
| 39 |
+
grpc/aio/__pycache__/_base_server.cpython-310.pyc,,
|
| 40 |
+
grpc/aio/__pycache__/_call.cpython-310.pyc,,
|
| 41 |
+
grpc/aio/__pycache__/_channel.cpython-310.pyc,,
|
| 42 |
+
grpc/aio/__pycache__/_interceptor.cpython-310.pyc,,
|
| 43 |
+
grpc/aio/__pycache__/_metadata.cpython-310.pyc,,
|
| 44 |
+
grpc/aio/__pycache__/_server.cpython-310.pyc,,
|
| 45 |
+
grpc/aio/__pycache__/_typing.cpython-310.pyc,,
|
| 46 |
+
grpc/aio/__pycache__/_utils.cpython-310.pyc,,
|
| 47 |
+
grpc/aio/_base_call.py,sha256=THFXG0o0ISn40Jrw0sNlcFzfobD8UrjgjddMWtg8ZJA,7560
|
| 48 |
+
grpc/aio/_base_channel.py,sha256=wVwElJ73bfTTkYC9J6NZjCq-q2MEvTOLSbOseNPzr8k,13893
|
| 49 |
+
grpc/aio/_base_server.py,sha256=_bBNGAy6feBwyE4aIuwjKVdCy2wQYpiBOtFg2rXaZ-8,12560
|
| 50 |
+
grpc/aio/_call.py,sha256=N9rbvu8-0B_rV7RthtSlp-nZakDoshHgptOozEuVqtU,25356
|
| 51 |
+
grpc/aio/_channel.py,sha256=J1opppQ_zkyhYTqh0cxo85tX14wdki3qccp1BFSUzd0,22099
|
| 52 |
+
grpc/aio/_interceptor.py,sha256=mXz0ivpOBkbBpMQyxInHmFCqVm1HrYX0GRq4IZwWCFQ,41345
|
| 53 |
+
grpc/aio/_metadata.py,sha256=Bhxf6d8r90FQe-HPEUEOrowfAb-DA_6OfaQejcYJjog,5009
|
| 54 |
+
grpc/aio/_server.py,sha256=_7v2-W92W1Ag1LINrxCED-647nAquGFUKbgGnKN06_s,8931
|
| 55 |
+
grpc/aio/_typing.py,sha256=xMlG33vn_UkdkRv39Udx421BWVgt5RHCu8gJXAQ18do,1378
|
| 56 |
+
grpc/aio/_utils.py,sha256=Bh5-lQO2xszdZeTFAWFfFhhaKy20ll4kucD6f_YZTlg,821
|
| 57 |
+
grpc/beta/__init__.py,sha256=w3kqSAyaZgP-W0890xR4L4WeBPGrtsQCQJe0FUFR0K0,577
|
| 58 |
+
grpc/beta/__pycache__/__init__.cpython-310.pyc,,
|
| 59 |
+
grpc/beta/__pycache__/_client_adaptations.cpython-310.pyc,,
|
| 60 |
+
grpc/beta/__pycache__/_metadata.cpython-310.pyc,,
|
| 61 |
+
grpc/beta/__pycache__/_server_adaptations.cpython-310.pyc,,
|
| 62 |
+
grpc/beta/__pycache__/implementations.cpython-310.pyc,,
|
| 63 |
+
grpc/beta/__pycache__/interfaces.cpython-310.pyc,,
|
| 64 |
+
grpc/beta/__pycache__/utilities.cpython-310.pyc,,
|
| 65 |
+
grpc/beta/_client_adaptations.py,sha256=q4rwPyIv9ratLPhC3xmq9M1_fuAVTW71slsBChHP4T0,27023
|
| 66 |
+
grpc/beta/_metadata.py,sha256=pC2-RLU3nHhTXTk_RMyoA3WwmRhm_woLbB-13lKO-aY,1638
|
| 67 |
+
grpc/beta/_server_adaptations.py,sha256=-f_hSbqBZqNgaz55B1zbE7TK-kwc1oHnngXHMDU-kKk,14611
|
| 68 |
+
grpc/beta/implementations.py,sha256=0yITggzqKKo8Lb8ZwPx2zIu414mIQxfan3jmenchIHk,12058
|
| 69 |
+
grpc/beta/interfaces.py,sha256=mtQnvm7Bg2u5MQZf2_nmxUxKFCANDw7VUOHHvE7cBhM,6082
|
| 70 |
+
grpc/beta/utilities.py,sha256=h_2yPH_5sCsFWKr9bBfa-JXj0CnSFaSa-_snKqtUUTo,5005
|
| 71 |
+
grpc/experimental/__init__.py,sha256=uUcQbntsX9ROKozWD6eTVnAYJ6Dea7VUSpGGv1L5Hz0,4103
|
| 72 |
+
grpc/experimental/__pycache__/__init__.cpython-310.pyc,,
|
| 73 |
+
grpc/experimental/__pycache__/gevent.cpython-310.pyc,,
|
| 74 |
+
grpc/experimental/__pycache__/session_cache.cpython-310.pyc,,
|
| 75 |
+
grpc/experimental/aio/__init__.py,sha256=bIyDdGBbNHi5F_kHvwByONjc4M_74thy53YmBDr1ZPo,660
|
| 76 |
+
grpc/experimental/aio/__pycache__/__init__.cpython-310.pyc,,
|
| 77 |
+
grpc/experimental/gevent.py,sha256=_YAk9aH2PCZCpaCnW9uGY77W21342dEWm8wOVApTx88,973
|
| 78 |
+
grpc/experimental/session_cache.py,sha256=wAauvDzxvTC6-p3jMbPnTc7y74nhDKSRjb0ktfMPCm8,1533
|
| 79 |
+
grpc/framework/__init__.py,sha256=w3kqSAyaZgP-W0890xR4L4WeBPGrtsQCQJe0FUFR0K0,577
|
| 80 |
+
grpc/framework/__pycache__/__init__.cpython-310.pyc,,
|
| 81 |
+
grpc/framework/common/__init__.py,sha256=w3kqSAyaZgP-W0890xR4L4WeBPGrtsQCQJe0FUFR0K0,577
|
| 82 |
+
grpc/framework/common/__pycache__/__init__.cpython-310.pyc,,
|
| 83 |
+
grpc/framework/common/__pycache__/cardinality.cpython-310.pyc,,
|
| 84 |
+
grpc/framework/common/__pycache__/style.cpython-310.pyc,,
|
| 85 |
+
grpc/framework/common/cardinality.py,sha256=ygWtrjjsk-SOPLHaey6-7ekD23Qhu3k4QUNmYI1ScVU,988
|
| 86 |
+
grpc/framework/common/style.py,sha256=X9wN-af8T7WWhFfdtmFkdqPN5PbpI8FJsKGdmPivaAU,824
|
| 87 |
+
grpc/framework/foundation/__init__.py,sha256=w3kqSAyaZgP-W0890xR4L4WeBPGrtsQCQJe0FUFR0K0,577
|
| 88 |
+
grpc/framework/foundation/__pycache__/__init__.cpython-310.pyc,,
|
| 89 |
+
grpc/framework/foundation/__pycache__/abandonment.cpython-310.pyc,,
|
| 90 |
+
grpc/framework/foundation/__pycache__/callable_util.cpython-310.pyc,,
|
| 91 |
+
grpc/framework/foundation/__pycache__/future.cpython-310.pyc,,
|
| 92 |
+
grpc/framework/foundation/__pycache__/logging_pool.cpython-310.pyc,,
|
| 93 |
+
grpc/framework/foundation/__pycache__/stream.cpython-310.pyc,,
|
| 94 |
+
grpc/framework/foundation/__pycache__/stream_util.cpython-310.pyc,,
|
| 95 |
+
grpc/framework/foundation/abandonment.py,sha256=AF4Y734bGIVaX0CL8W9XaaPYuPUacgciBEZ9Wb4fpGA,878
|
| 96 |
+
grpc/framework/foundation/callable_util.py,sha256=_uvikTKQnBPQv0Bfo59FkzwvclWDLxssFqHhdn-qqEY,3151
|
| 97 |
+
grpc/framework/foundation/future.py,sha256=djoeNq-Wd5_tkzQwF_ZeL1nYAWKJ5rwzelGoTPdR1ME,8373
|
| 98 |
+
grpc/framework/foundation/logging_pool.py,sha256=j8PFaWarQrnDebsDxmcJMzfFsJH8xGY2A4z6Plo_Q9c,2248
|
| 99 |
+
grpc/framework/foundation/stream.py,sha256=F6XQUJC8rT-ciybPtHP7PTMlMXNoVXyzmnRwYKos0Ms,1377
|
| 100 |
+
grpc/framework/foundation/stream_util.py,sha256=CUx6geCSB9zwgdlE0XHXcPSbuLeuNfClsZFyjsQVMB0,4772
|
| 101 |
+
grpc/framework/interfaces/__init__.py,sha256=w3kqSAyaZgP-W0890xR4L4WeBPGrtsQCQJe0FUFR0K0,577
|
| 102 |
+
grpc/framework/interfaces/__pycache__/__init__.cpython-310.pyc,,
|
| 103 |
+
grpc/framework/interfaces/base/__init__.py,sha256=w3kqSAyaZgP-W0890xR4L4WeBPGrtsQCQJe0FUFR0K0,577
|
| 104 |
+
grpc/framework/interfaces/base/__pycache__/__init__.cpython-310.pyc,,
|
| 105 |
+
grpc/framework/interfaces/base/__pycache__/base.cpython-310.pyc,,
|
| 106 |
+
grpc/framework/interfaces/base/__pycache__/utilities.cpython-310.pyc,,
|
| 107 |
+
grpc/framework/interfaces/base/base.py,sha256=aDW-nCVA4brDc3ZRf_b1-MOZ4Jv7ss_F4V6jIeJugTc,12234
|
| 108 |
+
grpc/framework/interfaces/base/utilities.py,sha256=buvlDv3ulHgsF4ej6DzQecoPGXwfMV2eyZ70lPMJfNA,2362
|
| 109 |
+
grpc/framework/interfaces/face/__init__.py,sha256=w3kqSAyaZgP-W0890xR4L4WeBPGrtsQCQJe0FUFR0K0,577
|
| 110 |
+
grpc/framework/interfaces/face/__pycache__/__init__.cpython-310.pyc,,
|
| 111 |
+
grpc/framework/interfaces/face/__pycache__/face.cpython-310.pyc,,
|
| 112 |
+
grpc/framework/interfaces/face/__pycache__/utilities.cpython-310.pyc,,
|
| 113 |
+
grpc/framework/interfaces/face/face.py,sha256=OGyApdjsZ8BF9nNq-2sd6WXd9bPpbvj4jvXrGdJu-nY,39700
|
| 114 |
+
grpc/framework/interfaces/face/utilities.py,sha256=jRmAmV0hXPXcN5a6Vg7OlBbcltGB6B10bUfoqqHJmUc,6777
|
| 115 |
+
grpcio-1.71.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 116 |
+
grpcio-1.71.0.dist-info/LICENSE,sha256=WQGY4_MF8sNH_eZNY3xlSSu-9VTbbINk4UnNN143l-4,29687
|
| 117 |
+
grpcio-1.71.0.dist-info/METADATA,sha256=E-somUcBTIs_ie0p-F7XMRZ29pybhbm-DeAQKCsjpdA,3838
|
| 118 |
+
grpcio-1.71.0.dist-info/RECORD,,
|
| 119 |
+
grpcio-1.71.0.dist-info/WHEEL,sha256=CzQQWV-lNyM92gr3iaBk8dvO35YDHRxgzkZ-dxumUIM,152
|
| 120 |
+
grpcio-1.71.0.dist-info/top_level.txt,sha256=eEd2Jq_aVQFp38bWW8Pfwjz_5iibqeOFT-2zXlPAq_8,5
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/grpcio-1.71.0.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.43.0)
|
| 3 |
+
Root-Is-Purelib: false
|
| 4 |
+
Tag: cp310-cp310-manylinux_2_17_x86_64
|
| 5 |
+
Tag: cp310-cp310-manylinux2014_x86_64
|
| 6 |
+
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/grpcio-1.71.0.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
grpc
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/_models.py
ADDED
|
@@ -0,0 +1,1277 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import codecs
|
| 4 |
+
import datetime
|
| 5 |
+
import email.message
|
| 6 |
+
import json as jsonlib
|
| 7 |
+
import re
|
| 8 |
+
import typing
|
| 9 |
+
import urllib.request
|
| 10 |
+
from collections.abc import Mapping
|
| 11 |
+
from http.cookiejar import Cookie, CookieJar
|
| 12 |
+
|
| 13 |
+
from ._content import ByteStream, UnattachedStream, encode_request, encode_response
|
| 14 |
+
from ._decoders import (
|
| 15 |
+
SUPPORTED_DECODERS,
|
| 16 |
+
ByteChunker,
|
| 17 |
+
ContentDecoder,
|
| 18 |
+
IdentityDecoder,
|
| 19 |
+
LineDecoder,
|
| 20 |
+
MultiDecoder,
|
| 21 |
+
TextChunker,
|
| 22 |
+
TextDecoder,
|
| 23 |
+
)
|
| 24 |
+
from ._exceptions import (
|
| 25 |
+
CookieConflict,
|
| 26 |
+
HTTPStatusError,
|
| 27 |
+
RequestNotRead,
|
| 28 |
+
ResponseNotRead,
|
| 29 |
+
StreamClosed,
|
| 30 |
+
StreamConsumed,
|
| 31 |
+
request_context,
|
| 32 |
+
)
|
| 33 |
+
from ._multipart import get_multipart_boundary_from_content_type
|
| 34 |
+
from ._status_codes import codes
|
| 35 |
+
from ._types import (
|
| 36 |
+
AsyncByteStream,
|
| 37 |
+
CookieTypes,
|
| 38 |
+
HeaderTypes,
|
| 39 |
+
QueryParamTypes,
|
| 40 |
+
RequestContent,
|
| 41 |
+
RequestData,
|
| 42 |
+
RequestExtensions,
|
| 43 |
+
RequestFiles,
|
| 44 |
+
ResponseContent,
|
| 45 |
+
ResponseExtensions,
|
| 46 |
+
SyncByteStream,
|
| 47 |
+
)
|
| 48 |
+
from ._urls import URL
|
| 49 |
+
from ._utils import to_bytes_or_str, to_str
|
| 50 |
+
|
| 51 |
+
__all__ = ["Cookies", "Headers", "Request", "Response"]
|
| 52 |
+
|
| 53 |
+
SENSITIVE_HEADERS = {"authorization", "proxy-authorization"}
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def _is_known_encoding(encoding: str) -> bool:
|
| 57 |
+
"""
|
| 58 |
+
Return `True` if `encoding` is a known codec.
|
| 59 |
+
"""
|
| 60 |
+
try:
|
| 61 |
+
codecs.lookup(encoding)
|
| 62 |
+
except LookupError:
|
| 63 |
+
return False
|
| 64 |
+
return True
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _normalize_header_key(key: str | bytes, encoding: str | None = None) -> bytes:
|
| 68 |
+
"""
|
| 69 |
+
Coerce str/bytes into a strictly byte-wise HTTP header key.
|
| 70 |
+
"""
|
| 71 |
+
return key if isinstance(key, bytes) else key.encode(encoding or "ascii")
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def _normalize_header_value(value: str | bytes, encoding: str | None = None) -> bytes:
|
| 75 |
+
"""
|
| 76 |
+
Coerce str/bytes into a strictly byte-wise HTTP header value.
|
| 77 |
+
"""
|
| 78 |
+
if isinstance(value, bytes):
|
| 79 |
+
return value
|
| 80 |
+
if not isinstance(value, str):
|
| 81 |
+
raise TypeError(f"Header value must be str or bytes, not {type(value)}")
|
| 82 |
+
return value.encode(encoding or "ascii")
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def _parse_content_type_charset(content_type: str) -> str | None:
|
| 86 |
+
# We used to use `cgi.parse_header()` here, but `cgi` became a dead battery.
|
| 87 |
+
# See: https://peps.python.org/pep-0594/#cgi
|
| 88 |
+
msg = email.message.Message()
|
| 89 |
+
msg["content-type"] = content_type
|
| 90 |
+
return msg.get_content_charset(failobj=None)
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def _parse_header_links(value: str) -> list[dict[str, str]]:
|
| 94 |
+
"""
|
| 95 |
+
Returns a list of parsed link headers, for more info see:
|
| 96 |
+
https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link
|
| 97 |
+
The generic syntax of those is:
|
| 98 |
+
Link: < uri-reference >; param1=value1; param2="value2"
|
| 99 |
+
So for instance:
|
| 100 |
+
Link; '<http:/.../front.jpeg>; type="image/jpeg",<http://.../back.jpeg>;'
|
| 101 |
+
would return
|
| 102 |
+
[
|
| 103 |
+
{"url": "http:/.../front.jpeg", "type": "image/jpeg"},
|
| 104 |
+
{"url": "http://.../back.jpeg"},
|
| 105 |
+
]
|
| 106 |
+
:param value: HTTP Link entity-header field
|
| 107 |
+
:return: list of parsed link headers
|
| 108 |
+
"""
|
| 109 |
+
links: list[dict[str, str]] = []
|
| 110 |
+
replace_chars = " '\""
|
| 111 |
+
value = value.strip(replace_chars)
|
| 112 |
+
if not value:
|
| 113 |
+
return links
|
| 114 |
+
for val in re.split(", *<", value):
|
| 115 |
+
try:
|
| 116 |
+
url, params = val.split(";", 1)
|
| 117 |
+
except ValueError:
|
| 118 |
+
url, params = val, ""
|
| 119 |
+
link = {"url": url.strip("<> '\"")}
|
| 120 |
+
for param in params.split(";"):
|
| 121 |
+
try:
|
| 122 |
+
key, value = param.split("=")
|
| 123 |
+
except ValueError:
|
| 124 |
+
break
|
| 125 |
+
link[key.strip(replace_chars)] = value.strip(replace_chars)
|
| 126 |
+
links.append(link)
|
| 127 |
+
return links
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def _obfuscate_sensitive_headers(
    items: typing.Iterable[tuple[typing.AnyStr, typing.AnyStr]],
) -> typing.Iterator[tuple[typing.AnyStr, typing.AnyStr]]:
    """
    Yield header pairs, masking the values of credential-bearing headers
    (those listed in SENSITIVE_HEADERS) with the literal "[secure]".
    """
    for name, value in items:
        if to_str(name.lower()) in SENSITIVE_HEADERS:
            # Match the str/bytes type of the original value.
            value = to_bytes_or_str("[secure]", match_type_of=value)
        yield name, value
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
class Headers(typing.MutableMapping[str, str]):
|
| 140 |
+
"""
|
| 141 |
+
HTTP headers, as a case-insensitive multi-dict.
|
| 142 |
+
"""
|
| 143 |
+
|
| 144 |
+
def __init__(
|
| 145 |
+
self,
|
| 146 |
+
headers: HeaderTypes | None = None,
|
| 147 |
+
encoding: str | None = None,
|
| 148 |
+
) -> None:
|
| 149 |
+
self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]]
|
| 150 |
+
|
| 151 |
+
if isinstance(headers, Headers):
|
| 152 |
+
self._list = list(headers._list)
|
| 153 |
+
elif isinstance(headers, Mapping):
|
| 154 |
+
for k, v in headers.items():
|
| 155 |
+
bytes_key = _normalize_header_key(k, encoding)
|
| 156 |
+
bytes_value = _normalize_header_value(v, encoding)
|
| 157 |
+
self._list.append((bytes_key, bytes_key.lower(), bytes_value))
|
| 158 |
+
elif headers is not None:
|
| 159 |
+
for k, v in headers:
|
| 160 |
+
bytes_key = _normalize_header_key(k, encoding)
|
| 161 |
+
bytes_value = _normalize_header_value(v, encoding)
|
| 162 |
+
self._list.append((bytes_key, bytes_key.lower(), bytes_value))
|
| 163 |
+
|
| 164 |
+
self._encoding = encoding
|
| 165 |
+
|
| 166 |
+
@property
|
| 167 |
+
def encoding(self) -> str:
|
| 168 |
+
"""
|
| 169 |
+
Header encoding is mandated as ascii, but we allow fallbacks to utf-8
|
| 170 |
+
or iso-8859-1.
|
| 171 |
+
"""
|
| 172 |
+
if self._encoding is None:
|
| 173 |
+
for encoding in ["ascii", "utf-8"]:
|
| 174 |
+
for key, value in self.raw:
|
| 175 |
+
try:
|
| 176 |
+
key.decode(encoding)
|
| 177 |
+
value.decode(encoding)
|
| 178 |
+
except UnicodeDecodeError:
|
| 179 |
+
break
|
| 180 |
+
else:
|
| 181 |
+
# The else block runs if 'break' did not occur, meaning
|
| 182 |
+
# all values fitted the encoding.
|
| 183 |
+
self._encoding = encoding
|
| 184 |
+
break
|
| 185 |
+
else:
|
| 186 |
+
# The ISO-8859-1 encoding covers all 256 code points in a byte,
|
| 187 |
+
# so will never raise decode errors.
|
| 188 |
+
self._encoding = "iso-8859-1"
|
| 189 |
+
return self._encoding
|
| 190 |
+
|
| 191 |
+
@encoding.setter
def encoding(self, value: str) -> None:
    """Pin the codec used to decode the raw header bytes."""
    self._encoding = value
|
| 194 |
+
|
| 195 |
+
@property
|
| 196 |
+
def raw(self) -> list[tuple[bytes, bytes]]:
|
| 197 |
+
"""
|
| 198 |
+
Returns a list of the raw header items, as byte pairs.
|
| 199 |
+
"""
|
| 200 |
+
return [(raw_key, value) for raw_key, _, value in self._list]
|
| 201 |
+
|
| 202 |
+
def keys(self) -> typing.KeysView[str]:
|
| 203 |
+
return {key.decode(self.encoding): None for _, key, value in self._list}.keys()
|
| 204 |
+
|
| 205 |
+
def values(self) -> typing.ValuesView[str]:
|
| 206 |
+
values_dict: dict[str, str] = {}
|
| 207 |
+
for _, key, value in self._list:
|
| 208 |
+
str_key = key.decode(self.encoding)
|
| 209 |
+
str_value = value.decode(self.encoding)
|
| 210 |
+
if str_key in values_dict:
|
| 211 |
+
values_dict[str_key] += f", {str_value}"
|
| 212 |
+
else:
|
| 213 |
+
values_dict[str_key] = str_value
|
| 214 |
+
return values_dict.values()
|
| 215 |
+
|
| 216 |
+
def items(self) -> typing.ItemsView[str, str]:
|
| 217 |
+
"""
|
| 218 |
+
Return `(key, value)` items of headers. Concatenate headers
|
| 219 |
+
into a single comma separated value when a key occurs multiple times.
|
| 220 |
+
"""
|
| 221 |
+
values_dict: dict[str, str] = {}
|
| 222 |
+
for _, key, value in self._list:
|
| 223 |
+
str_key = key.decode(self.encoding)
|
| 224 |
+
str_value = value.decode(self.encoding)
|
| 225 |
+
if str_key in values_dict:
|
| 226 |
+
values_dict[str_key] += f", {str_value}"
|
| 227 |
+
else:
|
| 228 |
+
values_dict[str_key] = str_value
|
| 229 |
+
return values_dict.items()
|
| 230 |
+
|
| 231 |
+
def multi_items(self) -> list[tuple[str, str]]:
|
| 232 |
+
"""
|
| 233 |
+
Return a list of `(key, value)` pairs of headers. Allow multiple
|
| 234 |
+
occurrences of the same key without concatenating into a single
|
| 235 |
+
comma separated value.
|
| 236 |
+
"""
|
| 237 |
+
return [
|
| 238 |
+
(key.decode(self.encoding), value.decode(self.encoding))
|
| 239 |
+
for _, key, value in self._list
|
| 240 |
+
]
|
| 241 |
+
|
| 242 |
+
def get(self, key: str, default: typing.Any = None) -> typing.Any:
    """
    Return a header value. If multiple occurrences of the header occur
    then concatenate them together with commas.

    Returns `default` when the header is absent.
    """
    # EAFP: __getitem__ already implements the case-insensitive
    # lookup-and-join logic, so just fall back on KeyError.
    try:
        return self[key]
    except KeyError:
        return default
|
| 251 |
+
|
| 252 |
+
def get_list(self, key: str, split_commas: bool = False) -> list[str]:
|
| 253 |
+
"""
|
| 254 |
+
Return a list of all header values for a given key.
|
| 255 |
+
If `split_commas=True` is passed, then any comma separated header
|
| 256 |
+
values are split into multiple return strings.
|
| 257 |
+
"""
|
| 258 |
+
get_header_key = key.lower().encode(self.encoding)
|
| 259 |
+
|
| 260 |
+
values = [
|
| 261 |
+
item_value.decode(self.encoding)
|
| 262 |
+
for _, item_key, item_value in self._list
|
| 263 |
+
if item_key.lower() == get_header_key
|
| 264 |
+
]
|
| 265 |
+
|
| 266 |
+
if not split_commas:
|
| 267 |
+
return values
|
| 268 |
+
|
| 269 |
+
split_values = []
|
| 270 |
+
for value in values:
|
| 271 |
+
split_values.extend([item.strip() for item in value.split(",")])
|
| 272 |
+
return split_values
|
| 273 |
+
|
| 274 |
+
def update(self, headers: HeaderTypes | None = None) -> None: # type: ignore
|
| 275 |
+
headers = Headers(headers)
|
| 276 |
+
for key in headers.keys():
|
| 277 |
+
if key in self:
|
| 278 |
+
self.pop(key)
|
| 279 |
+
self._list.extend(headers._list)
|
| 280 |
+
|
| 281 |
+
def copy(self) -> Headers:
    """Return an independent copy of these headers, pinned to the current encoding."""
    return Headers(self, encoding=self.encoding)
|
| 283 |
+
|
| 284 |
+
def __getitem__(self, key: str) -> str:
|
| 285 |
+
"""
|
| 286 |
+
Return a single header value.
|
| 287 |
+
|
| 288 |
+
If there are multiple headers with the same key, then we concatenate
|
| 289 |
+
them with commas. See: https://tools.ietf.org/html/rfc7230#section-3.2.2
|
| 290 |
+
"""
|
| 291 |
+
normalized_key = key.lower().encode(self.encoding)
|
| 292 |
+
|
| 293 |
+
items = [
|
| 294 |
+
header_value.decode(self.encoding)
|
| 295 |
+
for _, header_key, header_value in self._list
|
| 296 |
+
if header_key == normalized_key
|
| 297 |
+
]
|
| 298 |
+
|
| 299 |
+
if items:
|
| 300 |
+
return ", ".join(items)
|
| 301 |
+
|
| 302 |
+
raise KeyError(key)
|
| 303 |
+
|
| 304 |
+
def __setitem__(self, key: str, value: str) -> None:
|
| 305 |
+
"""
|
| 306 |
+
Set the header `key` to `value`, removing any duplicate entries.
|
| 307 |
+
Retains insertion order.
|
| 308 |
+
"""
|
| 309 |
+
set_key = key.encode(self._encoding or "utf-8")
|
| 310 |
+
set_value = value.encode(self._encoding or "utf-8")
|
| 311 |
+
lookup_key = set_key.lower()
|
| 312 |
+
|
| 313 |
+
found_indexes = [
|
| 314 |
+
idx
|
| 315 |
+
for idx, (_, item_key, _) in enumerate(self._list)
|
| 316 |
+
if item_key == lookup_key
|
| 317 |
+
]
|
| 318 |
+
|
| 319 |
+
for idx in reversed(found_indexes[1:]):
|
| 320 |
+
del self._list[idx]
|
| 321 |
+
|
| 322 |
+
if found_indexes:
|
| 323 |
+
idx = found_indexes[0]
|
| 324 |
+
self._list[idx] = (set_key, lookup_key, set_value)
|
| 325 |
+
else:
|
| 326 |
+
self._list.append((set_key, lookup_key, set_value))
|
| 327 |
+
|
| 328 |
+
def __delitem__(self, key: str) -> None:
|
| 329 |
+
"""
|
| 330 |
+
Remove the header `key`.
|
| 331 |
+
"""
|
| 332 |
+
del_key = key.lower().encode(self.encoding)
|
| 333 |
+
|
| 334 |
+
pop_indexes = [
|
| 335 |
+
idx
|
| 336 |
+
for idx, (_, item_key, _) in enumerate(self._list)
|
| 337 |
+
if item_key.lower() == del_key
|
| 338 |
+
]
|
| 339 |
+
|
| 340 |
+
if not pop_indexes:
|
| 341 |
+
raise KeyError(key)
|
| 342 |
+
|
| 343 |
+
for idx in reversed(pop_indexes):
|
| 344 |
+
del self._list[idx]
|
| 345 |
+
|
| 346 |
+
def __contains__(self, key: typing.Any) -> bool:
|
| 347 |
+
header_key = key.lower().encode(self.encoding)
|
| 348 |
+
return header_key in [key for _, key, _ in self._list]
|
| 349 |
+
|
| 350 |
+
def __iter__(self) -> typing.Iterator[typing.Any]:
|
| 351 |
+
return iter(self.keys())
|
| 352 |
+
|
| 353 |
+
def __len__(self) -> int:
|
| 354 |
+
return len(self._list)
|
| 355 |
+
|
| 356 |
+
def __eq__(self, other: typing.Any) -> bool:
|
| 357 |
+
try:
|
| 358 |
+
other_headers = Headers(other)
|
| 359 |
+
except ValueError:
|
| 360 |
+
return False
|
| 361 |
+
|
| 362 |
+
self_list = [(key, value) for _, key, value in self._list]
|
| 363 |
+
other_list = [(key, value) for _, key, value in other_headers._list]
|
| 364 |
+
return sorted(self_list) == sorted(other_list)
|
| 365 |
+
|
| 366 |
+
def __repr__(self) -> str:
|
| 367 |
+
class_name = self.__class__.__name__
|
| 368 |
+
|
| 369 |
+
encoding_str = ""
|
| 370 |
+
if self.encoding != "ascii":
|
| 371 |
+
encoding_str = f", encoding={self.encoding!r}"
|
| 372 |
+
|
| 373 |
+
as_list = list(_obfuscate_sensitive_headers(self.multi_items()))
|
| 374 |
+
as_dict = dict(as_list)
|
| 375 |
+
|
| 376 |
+
no_duplicate_keys = len(as_dict) == len(as_list)
|
| 377 |
+
if no_duplicate_keys:
|
| 378 |
+
return f"{class_name}({as_dict!r}{encoding_str})"
|
| 379 |
+
return f"{class_name}({as_list!r}{encoding_str})"
|
| 380 |
+
|
| 381 |
+
|
| 382 |
+
class Request:
|
| 383 |
+
def __init__(
|
| 384 |
+
self,
|
| 385 |
+
method: str,
|
| 386 |
+
url: URL | str,
|
| 387 |
+
*,
|
| 388 |
+
params: QueryParamTypes | None = None,
|
| 389 |
+
headers: HeaderTypes | None = None,
|
| 390 |
+
cookies: CookieTypes | None = None,
|
| 391 |
+
content: RequestContent | None = None,
|
| 392 |
+
data: RequestData | None = None,
|
| 393 |
+
files: RequestFiles | None = None,
|
| 394 |
+
json: typing.Any | None = None,
|
| 395 |
+
stream: SyncByteStream | AsyncByteStream | None = None,
|
| 396 |
+
extensions: RequestExtensions | None = None,
|
| 397 |
+
) -> None:
|
| 398 |
+
self.method = method.upper()
|
| 399 |
+
self.url = URL(url) if params is None else URL(url, params=params)
|
| 400 |
+
self.headers = Headers(headers)
|
| 401 |
+
self.extensions = {} if extensions is None else dict(extensions)
|
| 402 |
+
|
| 403 |
+
if cookies:
|
| 404 |
+
Cookies(cookies).set_cookie_header(self)
|
| 405 |
+
|
| 406 |
+
if stream is None:
|
| 407 |
+
content_type: str | None = self.headers.get("content-type")
|
| 408 |
+
headers, stream = encode_request(
|
| 409 |
+
content=content,
|
| 410 |
+
data=data,
|
| 411 |
+
files=files,
|
| 412 |
+
json=json,
|
| 413 |
+
boundary=get_multipart_boundary_from_content_type(
|
| 414 |
+
content_type=content_type.encode(self.headers.encoding)
|
| 415 |
+
if content_type
|
| 416 |
+
else None
|
| 417 |
+
),
|
| 418 |
+
)
|
| 419 |
+
self._prepare(headers)
|
| 420 |
+
self.stream = stream
|
| 421 |
+
# Load the request body, except for streaming content.
|
| 422 |
+
if isinstance(stream, ByteStream):
|
| 423 |
+
self.read()
|
| 424 |
+
else:
|
| 425 |
+
# There's an important distinction between `Request(content=...)`,
|
| 426 |
+
# and `Request(stream=...)`.
|
| 427 |
+
#
|
| 428 |
+
# Using `content=...` implies automatically populated `Host` and content
|
| 429 |
+
# headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`.
|
| 430 |
+
#
|
| 431 |
+
# Using `stream=...` will not automatically include *any*
|
| 432 |
+
# auto-populated headers.
|
| 433 |
+
#
|
| 434 |
+
# As an end-user you don't really need `stream=...`. It's only
|
| 435 |
+
# useful when:
|
| 436 |
+
#
|
| 437 |
+
# * Preserving the request stream when copying requests, eg for redirects.
|
| 438 |
+
# * Creating request instances on the *server-side* of the transport API.
|
| 439 |
+
self.stream = stream
|
| 440 |
+
|
| 441 |
+
def _prepare(self, default_headers: dict[str, str]) -> None:
|
| 442 |
+
for key, value in default_headers.items():
|
| 443 |
+
# Ignore Transfer-Encoding if the Content-Length has been set explicitly.
|
| 444 |
+
if key.lower() == "transfer-encoding" and "Content-Length" in self.headers:
|
| 445 |
+
continue
|
| 446 |
+
self.headers.setdefault(key, value)
|
| 447 |
+
|
| 448 |
+
auto_headers: list[tuple[bytes, bytes]] = []
|
| 449 |
+
|
| 450 |
+
has_host = "Host" in self.headers
|
| 451 |
+
has_content_length = (
|
| 452 |
+
"Content-Length" in self.headers or "Transfer-Encoding" in self.headers
|
| 453 |
+
)
|
| 454 |
+
|
| 455 |
+
if not has_host and self.url.host:
|
| 456 |
+
auto_headers.append((b"Host", self.url.netloc))
|
| 457 |
+
if not has_content_length and self.method in ("POST", "PUT", "PATCH"):
|
| 458 |
+
auto_headers.append((b"Content-Length", b"0"))
|
| 459 |
+
|
| 460 |
+
self.headers = Headers(auto_headers + self.headers.raw)
|
| 461 |
+
|
| 462 |
+
@property
|
| 463 |
+
def content(self) -> bytes:
|
| 464 |
+
if not hasattr(self, "_content"):
|
| 465 |
+
raise RequestNotRead()
|
| 466 |
+
return self._content
|
| 467 |
+
|
| 468 |
+
def read(self) -> bytes:
|
| 469 |
+
"""
|
| 470 |
+
Read and return the request content.
|
| 471 |
+
"""
|
| 472 |
+
if not hasattr(self, "_content"):
|
| 473 |
+
assert isinstance(self.stream, typing.Iterable)
|
| 474 |
+
self._content = b"".join(self.stream)
|
| 475 |
+
if not isinstance(self.stream, ByteStream):
|
| 476 |
+
# If a streaming request has been read entirely into memory, then
|
| 477 |
+
# we can replace the stream with a raw bytes implementation,
|
| 478 |
+
# to ensure that any non-replayable streams can still be used.
|
| 479 |
+
self.stream = ByteStream(self._content)
|
| 480 |
+
return self._content
|
| 481 |
+
|
| 482 |
+
async def aread(self) -> bytes:
|
| 483 |
+
"""
|
| 484 |
+
Read and return the request content.
|
| 485 |
+
"""
|
| 486 |
+
if not hasattr(self, "_content"):
|
| 487 |
+
assert isinstance(self.stream, typing.AsyncIterable)
|
| 488 |
+
self._content = b"".join([part async for part in self.stream])
|
| 489 |
+
if not isinstance(self.stream, ByteStream):
|
| 490 |
+
# If a streaming request has been read entirely into memory, then
|
| 491 |
+
# we can replace the stream with a raw bytes implementation,
|
| 492 |
+
# to ensure that any non-replayable streams can still be used.
|
| 493 |
+
self.stream = ByteStream(self._content)
|
| 494 |
+
return self._content
|
| 495 |
+
|
| 496 |
+
def __repr__(self) -> str:
|
| 497 |
+
class_name = self.__class__.__name__
|
| 498 |
+
url = str(self.url)
|
| 499 |
+
return f"<{class_name}({self.method!r}, {url!r})>"
|
| 500 |
+
|
| 501 |
+
def __getstate__(self) -> dict[str, typing.Any]:
|
| 502 |
+
return {
|
| 503 |
+
name: value
|
| 504 |
+
for name, value in self.__dict__.items()
|
| 505 |
+
if name not in ["extensions", "stream"]
|
| 506 |
+
}
|
| 507 |
+
|
| 508 |
+
def __setstate__(self, state: dict[str, typing.Any]) -> None:
|
| 509 |
+
for name, value in state.items():
|
| 510 |
+
setattr(self, name, value)
|
| 511 |
+
self.extensions = {}
|
| 512 |
+
self.stream = UnattachedStream()
|
| 513 |
+
|
| 514 |
+
|
| 515 |
+
class Response:
|
| 516 |
+
def __init__(
|
| 517 |
+
self,
|
| 518 |
+
status_code: int,
|
| 519 |
+
*,
|
| 520 |
+
headers: HeaderTypes | None = None,
|
| 521 |
+
content: ResponseContent | None = None,
|
| 522 |
+
text: str | None = None,
|
| 523 |
+
html: str | None = None,
|
| 524 |
+
json: typing.Any = None,
|
| 525 |
+
stream: SyncByteStream | AsyncByteStream | None = None,
|
| 526 |
+
request: Request | None = None,
|
| 527 |
+
extensions: ResponseExtensions | None = None,
|
| 528 |
+
history: list[Response] | None = None,
|
| 529 |
+
default_encoding: str | typing.Callable[[bytes], str] = "utf-8",
|
| 530 |
+
) -> None:
|
| 531 |
+
self.status_code = status_code
|
| 532 |
+
self.headers = Headers(headers)
|
| 533 |
+
|
| 534 |
+
self._request: Request | None = request
|
| 535 |
+
|
| 536 |
+
# When follow_redirects=False and a redirect is received,
|
| 537 |
+
# the client will set `response.next_request`.
|
| 538 |
+
self.next_request: Request | None = None
|
| 539 |
+
|
| 540 |
+
self.extensions = {} if extensions is None else dict(extensions)
|
| 541 |
+
self.history = [] if history is None else list(history)
|
| 542 |
+
|
| 543 |
+
self.is_closed = False
|
| 544 |
+
self.is_stream_consumed = False
|
| 545 |
+
|
| 546 |
+
self.default_encoding = default_encoding
|
| 547 |
+
|
| 548 |
+
if stream is None:
|
| 549 |
+
headers, stream = encode_response(content, text, html, json)
|
| 550 |
+
self._prepare(headers)
|
| 551 |
+
self.stream = stream
|
| 552 |
+
if isinstance(stream, ByteStream):
|
| 553 |
+
# Load the response body, except for streaming content.
|
| 554 |
+
self.read()
|
| 555 |
+
else:
|
| 556 |
+
# There's an important distinction between `Response(content=...)`,
|
| 557 |
+
# and `Response(stream=...)`.
|
| 558 |
+
#
|
| 559 |
+
# Using `content=...` implies automatically populated content headers,
|
| 560 |
+
# of either `Content-Length: ...` or `Transfer-Encoding: chunked`.
|
| 561 |
+
#
|
| 562 |
+
# Using `stream=...` will not automatically include any content headers.
|
| 563 |
+
#
|
| 564 |
+
# As an end-user you don't really need `stream=...`. It's only
|
| 565 |
+
# useful when creating response instances having received a stream
|
| 566 |
+
# from the transport API.
|
| 567 |
+
self.stream = stream
|
| 568 |
+
|
| 569 |
+
self._num_bytes_downloaded = 0
|
| 570 |
+
|
| 571 |
+
def _prepare(self, default_headers: dict[str, str]) -> None:
|
| 572 |
+
for key, value in default_headers.items():
|
| 573 |
+
# Ignore Transfer-Encoding if the Content-Length has been set explicitly.
|
| 574 |
+
if key.lower() == "transfer-encoding" and "content-length" in self.headers:
|
| 575 |
+
continue
|
| 576 |
+
self.headers.setdefault(key, value)
|
| 577 |
+
|
| 578 |
+
@property
|
| 579 |
+
def elapsed(self) -> datetime.timedelta:
|
| 580 |
+
"""
|
| 581 |
+
Returns the time taken for the complete request/response
|
| 582 |
+
cycle to complete.
|
| 583 |
+
"""
|
| 584 |
+
if not hasattr(self, "_elapsed"):
|
| 585 |
+
raise RuntimeError(
|
| 586 |
+
"'.elapsed' may only be accessed after the response "
|
| 587 |
+
"has been read or closed."
|
| 588 |
+
)
|
| 589 |
+
return self._elapsed
|
| 590 |
+
|
| 591 |
+
@elapsed.setter
|
| 592 |
+
def elapsed(self, elapsed: datetime.timedelta) -> None:
|
| 593 |
+
self._elapsed = elapsed
|
| 594 |
+
|
| 595 |
+
@property
|
| 596 |
+
def request(self) -> Request:
|
| 597 |
+
"""
|
| 598 |
+
Returns the request instance associated to the current response.
|
| 599 |
+
"""
|
| 600 |
+
if self._request is None:
|
| 601 |
+
raise RuntimeError(
|
| 602 |
+
"The request instance has not been set on this response."
|
| 603 |
+
)
|
| 604 |
+
return self._request
|
| 605 |
+
|
| 606 |
+
@request.setter
|
| 607 |
+
def request(self, value: Request) -> None:
|
| 608 |
+
self._request = value
|
| 609 |
+
|
| 610 |
+
@property
|
| 611 |
+
def http_version(self) -> str:
|
| 612 |
+
try:
|
| 613 |
+
http_version: bytes = self.extensions["http_version"]
|
| 614 |
+
except KeyError:
|
| 615 |
+
return "HTTP/1.1"
|
| 616 |
+
else:
|
| 617 |
+
return http_version.decode("ascii", errors="ignore")
|
| 618 |
+
|
| 619 |
+
@property
|
| 620 |
+
def reason_phrase(self) -> str:
|
| 621 |
+
try:
|
| 622 |
+
reason_phrase: bytes = self.extensions["reason_phrase"]
|
| 623 |
+
except KeyError:
|
| 624 |
+
return codes.get_reason_phrase(self.status_code)
|
| 625 |
+
else:
|
| 626 |
+
return reason_phrase.decode("ascii", errors="ignore")
|
| 627 |
+
|
| 628 |
+
@property
|
| 629 |
+
def url(self) -> URL:
|
| 630 |
+
"""
|
| 631 |
+
Returns the URL for which the request was made.
|
| 632 |
+
"""
|
| 633 |
+
return self.request.url
|
| 634 |
+
|
| 635 |
+
@property
|
| 636 |
+
def content(self) -> bytes:
|
| 637 |
+
if not hasattr(self, "_content"):
|
| 638 |
+
raise ResponseNotRead()
|
| 639 |
+
return self._content
|
| 640 |
+
|
| 641 |
+
@property
|
| 642 |
+
def text(self) -> str:
|
| 643 |
+
if not hasattr(self, "_text"):
|
| 644 |
+
content = self.content
|
| 645 |
+
if not content:
|
| 646 |
+
self._text = ""
|
| 647 |
+
else:
|
| 648 |
+
decoder = TextDecoder(encoding=self.encoding or "utf-8")
|
| 649 |
+
self._text = "".join([decoder.decode(self.content), decoder.flush()])
|
| 650 |
+
return self._text
|
| 651 |
+
|
| 652 |
+
@property
|
| 653 |
+
def encoding(self) -> str | None:
|
| 654 |
+
"""
|
| 655 |
+
Return an encoding to use for decoding the byte content into text.
|
| 656 |
+
The priority for determining this is given by...
|
| 657 |
+
|
| 658 |
+
* `.encoding = <>` has been set explicitly.
|
| 659 |
+
* The encoding as specified by the charset parameter in the Content-Type header.
|
| 660 |
+
* The encoding as determined by `default_encoding`, which may either be
|
| 661 |
+
a string like "utf-8" indicating the encoding to use, or may be a callable
|
| 662 |
+
which enables charset autodetection.
|
| 663 |
+
"""
|
| 664 |
+
if not hasattr(self, "_encoding"):
|
| 665 |
+
encoding = self.charset_encoding
|
| 666 |
+
if encoding is None or not _is_known_encoding(encoding):
|
| 667 |
+
if isinstance(self.default_encoding, str):
|
| 668 |
+
encoding = self.default_encoding
|
| 669 |
+
elif hasattr(self, "_content"):
|
| 670 |
+
encoding = self.default_encoding(self._content)
|
| 671 |
+
self._encoding = encoding or "utf-8"
|
| 672 |
+
return self._encoding
|
| 673 |
+
|
| 674 |
+
@encoding.setter
|
| 675 |
+
def encoding(self, value: str) -> None:
|
| 676 |
+
"""
|
| 677 |
+
Set the encoding to use for decoding the byte content into text.
|
| 678 |
+
|
| 679 |
+
If the `text` attribute has been accessed, attempting to set the
|
| 680 |
+
encoding will throw a ValueError.
|
| 681 |
+
"""
|
| 682 |
+
if hasattr(self, "_text"):
|
| 683 |
+
raise ValueError(
|
| 684 |
+
"Setting encoding after `text` has been accessed is not allowed."
|
| 685 |
+
)
|
| 686 |
+
self._encoding = value
|
| 687 |
+
|
| 688 |
+
@property
|
| 689 |
+
def charset_encoding(self) -> str | None:
|
| 690 |
+
"""
|
| 691 |
+
Return the encoding, as specified by the Content-Type header.
|
| 692 |
+
"""
|
| 693 |
+
content_type = self.headers.get("Content-Type")
|
| 694 |
+
if content_type is None:
|
| 695 |
+
return None
|
| 696 |
+
|
| 697 |
+
return _parse_content_type_charset(content_type)
|
| 698 |
+
|
| 699 |
+
def _get_content_decoder(self) -> ContentDecoder:
|
| 700 |
+
"""
|
| 701 |
+
Returns a decoder instance which can be used to decode the raw byte
|
| 702 |
+
content, depending on the Content-Encoding used in the response.
|
| 703 |
+
"""
|
| 704 |
+
if not hasattr(self, "_decoder"):
|
| 705 |
+
decoders: list[ContentDecoder] = []
|
| 706 |
+
values = self.headers.get_list("content-encoding", split_commas=True)
|
| 707 |
+
for value in values:
|
| 708 |
+
value = value.strip().lower()
|
| 709 |
+
try:
|
| 710 |
+
decoder_cls = SUPPORTED_DECODERS[value]
|
| 711 |
+
decoders.append(decoder_cls())
|
| 712 |
+
except KeyError:
|
| 713 |
+
continue
|
| 714 |
+
|
| 715 |
+
if len(decoders) == 1:
|
| 716 |
+
self._decoder = decoders[0]
|
| 717 |
+
elif len(decoders) > 1:
|
| 718 |
+
self._decoder = MultiDecoder(children=decoders)
|
| 719 |
+
else:
|
| 720 |
+
self._decoder = IdentityDecoder()
|
| 721 |
+
|
| 722 |
+
return self._decoder
|
| 723 |
+
|
| 724 |
+
@property
|
| 725 |
+
def is_informational(self) -> bool:
|
| 726 |
+
"""
|
| 727 |
+
A property which is `True` for 1xx status codes, `False` otherwise.
|
| 728 |
+
"""
|
| 729 |
+
return codes.is_informational(self.status_code)
|
| 730 |
+
|
| 731 |
+
@property
|
| 732 |
+
def is_success(self) -> bool:
|
| 733 |
+
"""
|
| 734 |
+
A property which is `True` for 2xx status codes, `False` otherwise.
|
| 735 |
+
"""
|
| 736 |
+
return codes.is_success(self.status_code)
|
| 737 |
+
|
| 738 |
+
@property
|
| 739 |
+
def is_redirect(self) -> bool:
|
| 740 |
+
"""
|
| 741 |
+
A property which is `True` for 3xx status codes, `False` otherwise.
|
| 742 |
+
|
| 743 |
+
Note that not all responses with a 3xx status code indicate a URL redirect.
|
| 744 |
+
|
| 745 |
+
Use `response.has_redirect_location` to determine responses with a properly
|
| 746 |
+
formed URL redirection.
|
| 747 |
+
"""
|
| 748 |
+
return codes.is_redirect(self.status_code)
|
| 749 |
+
|
| 750 |
+
@property
|
| 751 |
+
def is_client_error(self) -> bool:
|
| 752 |
+
"""
|
| 753 |
+
A property which is `True` for 4xx status codes, `False` otherwise.
|
| 754 |
+
"""
|
| 755 |
+
return codes.is_client_error(self.status_code)
|
| 756 |
+
|
| 757 |
+
@property
|
| 758 |
+
def is_server_error(self) -> bool:
|
| 759 |
+
"""
|
| 760 |
+
A property which is `True` for 5xx status codes, `False` otherwise.
|
| 761 |
+
"""
|
| 762 |
+
return codes.is_server_error(self.status_code)
|
| 763 |
+
|
| 764 |
+
@property
|
| 765 |
+
def is_error(self) -> bool:
|
| 766 |
+
"""
|
| 767 |
+
A property which is `True` for 4xx and 5xx status codes, `False` otherwise.
|
| 768 |
+
"""
|
| 769 |
+
return codes.is_error(self.status_code)
|
| 770 |
+
|
| 771 |
+
@property
|
| 772 |
+
def has_redirect_location(self) -> bool:
|
| 773 |
+
"""
|
| 774 |
+
Returns True for 3xx responses with a properly formed URL redirection,
|
| 775 |
+
`False` otherwise.
|
| 776 |
+
"""
|
| 777 |
+
return (
|
| 778 |
+
self.status_code
|
| 779 |
+
in (
|
| 780 |
+
# 301 (Cacheable redirect. Method may change to GET.)
|
| 781 |
+
codes.MOVED_PERMANENTLY,
|
| 782 |
+
# 302 (Uncacheable redirect. Method may change to GET.)
|
| 783 |
+
codes.FOUND,
|
| 784 |
+
# 303 (Client should make a GET or HEAD request.)
|
| 785 |
+
codes.SEE_OTHER,
|
| 786 |
+
# 307 (Equiv. 302, but retain method)
|
| 787 |
+
codes.TEMPORARY_REDIRECT,
|
| 788 |
+
# 308 (Equiv. 301, but retain method)
|
| 789 |
+
codes.PERMANENT_REDIRECT,
|
| 790 |
+
)
|
| 791 |
+
and "Location" in self.headers
|
| 792 |
+
)
|
| 793 |
+
|
| 794 |
+
def raise_for_status(self) -> Response:
|
| 795 |
+
"""
|
| 796 |
+
Raise the `HTTPStatusError` if one occurred.
|
| 797 |
+
"""
|
| 798 |
+
request = self._request
|
| 799 |
+
if request is None:
|
| 800 |
+
raise RuntimeError(
|
| 801 |
+
"Cannot call `raise_for_status` as the request "
|
| 802 |
+
"instance has not been set on this response."
|
| 803 |
+
)
|
| 804 |
+
|
| 805 |
+
if self.is_success:
|
| 806 |
+
return self
|
| 807 |
+
|
| 808 |
+
if self.has_redirect_location:
|
| 809 |
+
message = (
|
| 810 |
+
"{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
|
| 811 |
+
"Redirect location: '{0.headers[location]}'\n"
|
| 812 |
+
"For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
|
| 813 |
+
)
|
| 814 |
+
else:
|
| 815 |
+
message = (
|
| 816 |
+
"{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
|
| 817 |
+
"For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
|
| 818 |
+
)
|
| 819 |
+
|
| 820 |
+
status_class = self.status_code // 100
|
| 821 |
+
error_types = {
|
| 822 |
+
1: "Informational response",
|
| 823 |
+
3: "Redirect response",
|
| 824 |
+
4: "Client error",
|
| 825 |
+
5: "Server error",
|
| 826 |
+
}
|
| 827 |
+
error_type = error_types.get(status_class, "Invalid status code")
|
| 828 |
+
message = message.format(self, error_type=error_type)
|
| 829 |
+
raise HTTPStatusError(message, request=request, response=self)
|
| 830 |
+
|
| 831 |
+
def json(self, **kwargs: typing.Any) -> typing.Any:
|
| 832 |
+
return jsonlib.loads(self.content, **kwargs)
|
| 833 |
+
|
| 834 |
+
@property
|
| 835 |
+
def cookies(self) -> Cookies:
|
| 836 |
+
if not hasattr(self, "_cookies"):
|
| 837 |
+
self._cookies = Cookies()
|
| 838 |
+
self._cookies.extract_cookies(self)
|
| 839 |
+
return self._cookies
|
| 840 |
+
|
| 841 |
+
@property
|
| 842 |
+
def links(self) -> dict[str | None, dict[str, str]]:
|
| 843 |
+
"""
|
| 844 |
+
Returns the parsed header links of the response, if any
|
| 845 |
+
"""
|
| 846 |
+
header = self.headers.get("link")
|
| 847 |
+
if header is None:
|
| 848 |
+
return {}
|
| 849 |
+
|
| 850 |
+
return {
|
| 851 |
+
(link.get("rel") or link.get("url")): link
|
| 852 |
+
for link in _parse_header_links(header)
|
| 853 |
+
}
|
| 854 |
+
|
| 855 |
+
@property
|
| 856 |
+
def num_bytes_downloaded(self) -> int:
|
| 857 |
+
return self._num_bytes_downloaded
|
| 858 |
+
|
| 859 |
+
def __repr__(self) -> str:
|
| 860 |
+
return f"<Response [{self.status_code} {self.reason_phrase}]>"
|
| 861 |
+
|
| 862 |
+
def __getstate__(self) -> dict[str, typing.Any]:
|
| 863 |
+
return {
|
| 864 |
+
name: value
|
| 865 |
+
for name, value in self.__dict__.items()
|
| 866 |
+
if name not in ["extensions", "stream", "is_closed", "_decoder"]
|
| 867 |
+
}
|
| 868 |
+
|
| 869 |
+
def __setstate__(self, state: dict[str, typing.Any]) -> None:
|
| 870 |
+
for name, value in state.items():
|
| 871 |
+
setattr(self, name, value)
|
| 872 |
+
self.is_closed = True
|
| 873 |
+
self.extensions = {}
|
| 874 |
+
self.stream = UnattachedStream()
|
| 875 |
+
|
| 876 |
+
def read(self) -> bytes:
|
| 877 |
+
"""
|
| 878 |
+
Read and return the response content.
|
| 879 |
+
"""
|
| 880 |
+
if not hasattr(self, "_content"):
|
| 881 |
+
self._content = b"".join(self.iter_bytes())
|
| 882 |
+
return self._content
|
| 883 |
+
|
| 884 |
+
def iter_bytes(self, chunk_size: int | None = None) -> typing.Iterator[bytes]:
|
| 885 |
+
"""
|
| 886 |
+
A byte-iterator over the decoded response content.
|
| 887 |
+
This allows us to handle gzip, deflate, brotli, and zstd encoded responses.
|
| 888 |
+
"""
|
| 889 |
+
if hasattr(self, "_content"):
|
| 890 |
+
chunk_size = len(self._content) if chunk_size is None else chunk_size
|
| 891 |
+
for i in range(0, len(self._content), max(chunk_size, 1)):
|
| 892 |
+
yield self._content[i : i + chunk_size]
|
| 893 |
+
else:
|
| 894 |
+
decoder = self._get_content_decoder()
|
| 895 |
+
chunker = ByteChunker(chunk_size=chunk_size)
|
| 896 |
+
with request_context(request=self._request):
|
| 897 |
+
for raw_bytes in self.iter_raw():
|
| 898 |
+
decoded = decoder.decode(raw_bytes)
|
| 899 |
+
for chunk in chunker.decode(decoded):
|
| 900 |
+
yield chunk
|
| 901 |
+
decoded = decoder.flush()
|
| 902 |
+
for chunk in chunker.decode(decoded):
|
| 903 |
+
yield chunk # pragma: no cover
|
| 904 |
+
for chunk in chunker.flush():
|
| 905 |
+
yield chunk
|
| 906 |
+
|
| 907 |
+
def iter_text(self, chunk_size: int | None = None) -> typing.Iterator[str]:
|
| 908 |
+
"""
|
| 909 |
+
A str-iterator over the decoded response content
|
| 910 |
+
that handles both gzip, deflate, etc but also detects the content's
|
| 911 |
+
string encoding.
|
| 912 |
+
"""
|
| 913 |
+
decoder = TextDecoder(encoding=self.encoding or "utf-8")
|
| 914 |
+
chunker = TextChunker(chunk_size=chunk_size)
|
| 915 |
+
with request_context(request=self._request):
|
| 916 |
+
for byte_content in self.iter_bytes():
|
| 917 |
+
text_content = decoder.decode(byte_content)
|
| 918 |
+
for chunk in chunker.decode(text_content):
|
| 919 |
+
yield chunk
|
| 920 |
+
text_content = decoder.flush()
|
| 921 |
+
for chunk in chunker.decode(text_content):
|
| 922 |
+
yield chunk # pragma: no cover
|
| 923 |
+
for chunk in chunker.flush():
|
| 924 |
+
yield chunk
|
| 925 |
+
|
| 926 |
+
def iter_lines(self) -> typing.Iterator[str]:
|
| 927 |
+
decoder = LineDecoder()
|
| 928 |
+
with request_context(request=self._request):
|
| 929 |
+
for text in self.iter_text():
|
| 930 |
+
for line in decoder.decode(text):
|
| 931 |
+
yield line
|
| 932 |
+
for line in decoder.flush():
|
| 933 |
+
yield line
|
| 934 |
+
|
| 935 |
+
def iter_raw(self, chunk_size: int | None = None) -> typing.Iterator[bytes]:
|
| 936 |
+
"""
|
| 937 |
+
A byte-iterator over the raw response content.
|
| 938 |
+
"""
|
| 939 |
+
if self.is_stream_consumed:
|
| 940 |
+
raise StreamConsumed()
|
| 941 |
+
if self.is_closed:
|
| 942 |
+
raise StreamClosed()
|
| 943 |
+
if not isinstance(self.stream, SyncByteStream):
|
| 944 |
+
raise RuntimeError("Attempted to call a sync iterator on an async stream.")
|
| 945 |
+
|
| 946 |
+
self.is_stream_consumed = True
|
| 947 |
+
self._num_bytes_downloaded = 0
|
| 948 |
+
chunker = ByteChunker(chunk_size=chunk_size)
|
| 949 |
+
|
| 950 |
+
with request_context(request=self._request):
|
| 951 |
+
for raw_stream_bytes in self.stream:
|
| 952 |
+
self._num_bytes_downloaded += len(raw_stream_bytes)
|
| 953 |
+
for chunk in chunker.decode(raw_stream_bytes):
|
| 954 |
+
yield chunk
|
| 955 |
+
|
| 956 |
+
for chunk in chunker.flush():
|
| 957 |
+
yield chunk
|
| 958 |
+
|
| 959 |
+
self.close()
|
| 960 |
+
|
| 961 |
+
def close(self) -> None:
|
| 962 |
+
"""
|
| 963 |
+
Close the response and release the connection.
|
| 964 |
+
Automatically called if the response body is read to completion.
|
| 965 |
+
"""
|
| 966 |
+
if not isinstance(self.stream, SyncByteStream):
|
| 967 |
+
raise RuntimeError("Attempted to call an sync close on an async stream.")
|
| 968 |
+
|
| 969 |
+
if not self.is_closed:
|
| 970 |
+
self.is_closed = True
|
| 971 |
+
with request_context(request=self._request):
|
| 972 |
+
self.stream.close()
|
| 973 |
+
|
| 974 |
+
async def aread(self) -> bytes:
|
| 975 |
+
"""
|
| 976 |
+
Read and return the response content.
|
| 977 |
+
"""
|
| 978 |
+
if not hasattr(self, "_content"):
|
| 979 |
+
self._content = b"".join([part async for part in self.aiter_bytes()])
|
| 980 |
+
return self._content
|
| 981 |
+
|
| 982 |
+
async def aiter_bytes(
|
| 983 |
+
self, chunk_size: int | None = None
|
| 984 |
+
) -> typing.AsyncIterator[bytes]:
|
| 985 |
+
"""
|
| 986 |
+
A byte-iterator over the decoded response content.
|
| 987 |
+
This allows us to handle gzip, deflate, brotli, and zstd encoded responses.
|
| 988 |
+
"""
|
| 989 |
+
if hasattr(self, "_content"):
|
| 990 |
+
chunk_size = len(self._content) if chunk_size is None else chunk_size
|
| 991 |
+
for i in range(0, len(self._content), max(chunk_size, 1)):
|
| 992 |
+
yield self._content[i : i + chunk_size]
|
| 993 |
+
else:
|
| 994 |
+
decoder = self._get_content_decoder()
|
| 995 |
+
chunker = ByteChunker(chunk_size=chunk_size)
|
| 996 |
+
with request_context(request=self._request):
|
| 997 |
+
async for raw_bytes in self.aiter_raw():
|
| 998 |
+
decoded = decoder.decode(raw_bytes)
|
| 999 |
+
for chunk in chunker.decode(decoded):
|
| 1000 |
+
yield chunk
|
| 1001 |
+
decoded = decoder.flush()
|
| 1002 |
+
for chunk in chunker.decode(decoded):
|
| 1003 |
+
yield chunk # pragma: no cover
|
| 1004 |
+
for chunk in chunker.flush():
|
| 1005 |
+
yield chunk
|
| 1006 |
+
|
| 1007 |
+
async def aiter_text(
|
| 1008 |
+
self, chunk_size: int | None = None
|
| 1009 |
+
) -> typing.AsyncIterator[str]:
|
| 1010 |
+
"""
|
| 1011 |
+
A str-iterator over the decoded response content
|
| 1012 |
+
that handles both gzip, deflate, etc but also detects the content's
|
| 1013 |
+
string encoding.
|
| 1014 |
+
"""
|
| 1015 |
+
decoder = TextDecoder(encoding=self.encoding or "utf-8")
|
| 1016 |
+
chunker = TextChunker(chunk_size=chunk_size)
|
| 1017 |
+
with request_context(request=self._request):
|
| 1018 |
+
async for byte_content in self.aiter_bytes():
|
| 1019 |
+
text_content = decoder.decode(byte_content)
|
| 1020 |
+
for chunk in chunker.decode(text_content):
|
| 1021 |
+
yield chunk
|
| 1022 |
+
text_content = decoder.flush()
|
| 1023 |
+
for chunk in chunker.decode(text_content):
|
| 1024 |
+
yield chunk # pragma: no cover
|
| 1025 |
+
for chunk in chunker.flush():
|
| 1026 |
+
yield chunk
|
| 1027 |
+
|
| 1028 |
+
async def aiter_lines(self) -> typing.AsyncIterator[str]:
|
| 1029 |
+
decoder = LineDecoder()
|
| 1030 |
+
with request_context(request=self._request):
|
| 1031 |
+
async for text in self.aiter_text():
|
| 1032 |
+
for line in decoder.decode(text):
|
| 1033 |
+
yield line
|
| 1034 |
+
for line in decoder.flush():
|
| 1035 |
+
yield line
|
| 1036 |
+
|
| 1037 |
+
async def aiter_raw(
|
| 1038 |
+
self, chunk_size: int | None = None
|
| 1039 |
+
) -> typing.AsyncIterator[bytes]:
|
| 1040 |
+
"""
|
| 1041 |
+
A byte-iterator over the raw response content.
|
| 1042 |
+
"""
|
| 1043 |
+
if self.is_stream_consumed:
|
| 1044 |
+
raise StreamConsumed()
|
| 1045 |
+
if self.is_closed:
|
| 1046 |
+
raise StreamClosed()
|
| 1047 |
+
if not isinstance(self.stream, AsyncByteStream):
|
| 1048 |
+
raise RuntimeError("Attempted to call an async iterator on an sync stream.")
|
| 1049 |
+
|
| 1050 |
+
self.is_stream_consumed = True
|
| 1051 |
+
self._num_bytes_downloaded = 0
|
| 1052 |
+
chunker = ByteChunker(chunk_size=chunk_size)
|
| 1053 |
+
|
| 1054 |
+
with request_context(request=self._request):
|
| 1055 |
+
async for raw_stream_bytes in self.stream:
|
| 1056 |
+
self._num_bytes_downloaded += len(raw_stream_bytes)
|
| 1057 |
+
for chunk in chunker.decode(raw_stream_bytes):
|
| 1058 |
+
yield chunk
|
| 1059 |
+
|
| 1060 |
+
for chunk in chunker.flush():
|
| 1061 |
+
yield chunk
|
| 1062 |
+
|
| 1063 |
+
await self.aclose()
|
| 1064 |
+
|
| 1065 |
+
async def aclose(self) -> None:
|
| 1066 |
+
"""
|
| 1067 |
+
Close the response and release the connection.
|
| 1068 |
+
Automatically called if the response body is read to completion.
|
| 1069 |
+
"""
|
| 1070 |
+
if not isinstance(self.stream, AsyncByteStream):
|
| 1071 |
+
raise RuntimeError("Attempted to call an async close on an sync stream.")
|
| 1072 |
+
|
| 1073 |
+
if not self.is_closed:
|
| 1074 |
+
self.is_closed = True
|
| 1075 |
+
with request_context(request=self._request):
|
| 1076 |
+
await self.stream.aclose()
|
| 1077 |
+
|
| 1078 |
+
|
| 1079 |
+
class Cookies(typing.MutableMapping[str, str]):
    """
    HTTP Cookies, as a mutable mapping.

    Backed by a standard-library `CookieJar`; accepts a dict of
    name/value pairs, a list of (name, value) tuples, another `Cookies`
    instance, or an existing `CookieJar`.
    """

    def __init__(self, cookies: CookieTypes | None = None) -> None:
        if cookies is not None and not isinstance(cookies, (dict, list, Cookies)):
            # Anything else is assumed to be a CookieJar instance,
            # which we adopt directly rather than copying.
            self.jar = cookies
            return

        self.jar = CookieJar()
        if isinstance(cookies, dict):
            for name, value in cookies.items():
                self.set(name, value)
        elif isinstance(cookies, list):
            for name, value in cookies:
                self.set(name, value)
        elif isinstance(cookies, Cookies):
            for existing in cookies.jar:
                self.jar.set_cookie(existing)

    def extract_cookies(self, response: Response) -> None:
        """
        Loads any cookies based on the response `Set-Cookie` headers.
        """
        urllib_response = self._CookieCompatResponse(response)
        urllib_request = self._CookieCompatRequest(response.request)
        self.jar.extract_cookies(urllib_response, urllib_request)  # type: ignore

    def set_cookie_header(self, request: Request) -> None:
        """
        Sets an appropriate 'Cookie:' HTTP header on the `Request`.
        """
        self.jar.add_cookie_header(self._CookieCompatRequest(request))

    def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None:
        """
        Set a cookie value by name. May optionally include domain and path.
        """
        # A session cookie (discard=True, no expiry) with RFC-2965-style
        # fields filled in the way `http.cookiejar` expects.
        cookie = Cookie(
            version=0,
            name=name,
            value=value,
            port=None,
            port_specified=False,
            domain=domain,
            domain_specified=bool(domain),
            domain_initial_dot=domain.startswith("."),
            path=path,
            path_specified=bool(path),
            secure=False,
            expires=None,
            discard=True,
            comment=None,
            comment_url=None,
            rest={"HttpOnly": None},
            rfc2109=False,
        )
        self.jar.set_cookie(cookie)

    def get(  # type: ignore
        self,
        name: str,
        default: str | None = None,
        domain: str | None = None,
        path: str | None = None,
    ) -> str | None:
        """
        Get a cookie by name. May optionally include domain and path
        in order to specify exactly which cookie to retrieve.
        """
        matched: str | None = None
        for cookie in self.jar:
            if cookie.name != name:
                continue
            if domain is not None and cookie.domain != domain:
                continue
            if path is not None and cookie.path != path:
                continue
            if matched is not None:
                # Ambiguous lookup: the caller must narrow by domain/path.
                message = f"Multiple cookies exist with name={name}"
                raise CookieConflict(message)
            matched = cookie.value

        return default if matched is None else matched

    def delete(
        self,
        name: str,
        domain: str | None = None,
        path: str | None = None,
    ) -> None:
        """
        Delete a cookie by name. May optionally include domain and path
        in order to specify exactly which cookie to delete.
        """
        if domain is not None and path is not None:
            # Fully qualified: the jar can remove it directly.
            return self.jar.clear(domain, path, name)

        matches = [
            cookie
            for cookie in self.jar
            if cookie.name == name
            and (domain is None or cookie.domain == domain)
            and (path is None or cookie.path == path)
        ]
        for cookie in matches:
            self.jar.clear(cookie.domain, cookie.path, cookie.name)

    def clear(self, domain: str | None = None, path: str | None = None) -> None:
        """
        Delete all cookies. Optionally include a domain and path in
        order to only delete a subset of all the cookies.
        """
        selectors: list[str] = []
        if domain is not None:
            selectors.append(domain)
        if path is not None:
            # `CookieJar.clear` requires a domain whenever a path is given.
            assert domain is not None
            selectors.append(path)
        self.jar.clear(*selectors)

    def update(self, cookies: CookieTypes | None = None) -> None:  # type: ignore
        incoming = Cookies(cookies)
        for cookie in incoming.jar:
            self.jar.set_cookie(cookie)

    def __setitem__(self, name: str, value: str) -> None:
        return self.set(name, value)

    def __getitem__(self, name: str) -> str:
        value = self.get(name)
        if value is None:
            raise KeyError(name)
        return value

    def __delitem__(self, name: str) -> None:
        return self.delete(name)

    def __len__(self) -> int:
        return len(self.jar)

    def __iter__(self) -> typing.Iterator[str]:
        return (cookie.name for cookie in self.jar)

    def __bool__(self) -> bool:
        # Truthy if the jar holds at least one cookie.
        return next(iter(self.jar), None) is not None

    def __repr__(self) -> str:
        rendered = ", ".join(
            f"<Cookie {cookie.name}={cookie.value} for {cookie.domain} />"
            for cookie in self.jar
        )
        return f"<Cookies[{rendered}]>"

    class _CookieCompatRequest(urllib.request.Request):
        """
        Wraps a `Request` instance up in a compatibility interface suitable
        for use with `CookieJar` operations.
        """

        def __init__(self, request: Request) -> None:
            super().__init__(
                url=str(request.url),
                headers=dict(request.headers),
                method=request.method,
            )
            self.request = request

        def add_unredirected_header(self, key: str, value: str) -> None:
            super().add_unredirected_header(key, value)
            self.request.headers[key] = value

    class _CookieCompatResponse:
        """
        Wraps a `Response` instance up in a compatibility interface suitable
        for use with `CookieJar` operations.
        """

        def __init__(self, response: Response) -> None:
            self.response = response

        def info(self) -> email.message.Message:
            message = email.message.Message()
            for key, value in self.response.headers.multi_items():
                # `Message.__setitem__` appends rather than replacing,
                # which is what repeated headers (e.g. Set-Cookie) need.
                message[key] = value
            return message
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/_status_codes.py
ADDED
|
@@ -0,0 +1,162 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from enum import IntEnum
|
| 4 |
+
|
| 5 |
+
__all__ = ["codes"]
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class codes(IntEnum):
    """HTTP status codes and reason phrases

    Status codes from the following RFCs are all observed:

    * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616
    * RFC 6585: Additional HTTP Status Codes
    * RFC 3229: Delta encoding in HTTP
    * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518
    * RFC 5842: Binding Extensions to WebDAV
    * RFC 7238: Permanent Redirect
    * RFC 2295: Transparent Content Negotiation in HTTP
    * RFC 2774: An HTTP Extension Framework
    * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2)
    * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0)
    * RFC 7725: An HTTP Status Code to Report Legal Obstacles
    * RFC 8297: An HTTP Status Code for Indicating Hints
    * RFC 8470: Using Early Data in HTTP
    """

    def __new__(cls, value: int, phrase: str = "") -> codes:
        # Members are declared as `(value, phrase)` pairs; stash the phrase
        # as an extra attribute on the int-valued member.
        member = int.__new__(cls, value)
        member._value_ = value
        member.phrase = phrase  # type: ignore[attr-defined]
        return member

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def get_reason_phrase(cls, value: int) -> str:
        """Return the reason phrase for `value`, or "" if unknown."""
        try:
            return cls(value).phrase  # type: ignore
        except ValueError:
            return ""

    @classmethod
    def is_informational(cls, value: int) -> bool:
        """
        Returns `True` for 1xx status codes, `False` otherwise.
        """
        return 100 <= value <= 199

    @classmethod
    def is_success(cls, value: int) -> bool:
        """
        Returns `True` for 2xx status codes, `False` otherwise.
        """
        return 200 <= value <= 299

    @classmethod
    def is_redirect(cls, value: int) -> bool:
        """
        Returns `True` for 3xx status codes, `False` otherwise.
        """
        return 300 <= value <= 399

    @classmethod
    def is_client_error(cls, value: int) -> bool:
        """
        Returns `True` for 4xx status codes, `False` otherwise.
        """
        return 400 <= value <= 499

    @classmethod
    def is_server_error(cls, value: int) -> bool:
        """
        Returns `True` for 5xx status codes, `False` otherwise.
        """
        return 500 <= value <= 599

    @classmethod
    def is_error(cls, value: int) -> bool:
        """
        Returns `True` for 4xx or 5xx status codes, `False` otherwise.
        """
        return 400 <= value <= 599

    # informational
    CONTINUE = 100, "Continue"
    SWITCHING_PROTOCOLS = 101, "Switching Protocols"
    PROCESSING = 102, "Processing"
    EARLY_HINTS = 103, "Early Hints"

    # success
    OK = 200, "OK"
    CREATED = 201, "Created"
    ACCEPTED = 202, "Accepted"
    NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information"
    NO_CONTENT = 204, "No Content"
    RESET_CONTENT = 205, "Reset Content"
    PARTIAL_CONTENT = 206, "Partial Content"
    MULTI_STATUS = 207, "Multi-Status"
    ALREADY_REPORTED = 208, "Already Reported"
    IM_USED = 226, "IM Used"

    # redirection
    MULTIPLE_CHOICES = 300, "Multiple Choices"
    MOVED_PERMANENTLY = 301, "Moved Permanently"
    FOUND = 302, "Found"
    SEE_OTHER = 303, "See Other"
    NOT_MODIFIED = 304, "Not Modified"
    USE_PROXY = 305, "Use Proxy"
    TEMPORARY_REDIRECT = 307, "Temporary Redirect"
    PERMANENT_REDIRECT = 308, "Permanent Redirect"

    # client error
    BAD_REQUEST = 400, "Bad Request"
    UNAUTHORIZED = 401, "Unauthorized"
    PAYMENT_REQUIRED = 402, "Payment Required"
    FORBIDDEN = 403, "Forbidden"
    NOT_FOUND = 404, "Not Found"
    METHOD_NOT_ALLOWED = 405, "Method Not Allowed"
    NOT_ACCEPTABLE = 406, "Not Acceptable"
    PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required"
    REQUEST_TIMEOUT = 408, "Request Timeout"
    CONFLICT = 409, "Conflict"
    GONE = 410, "Gone"
    LENGTH_REQUIRED = 411, "Length Required"
    PRECONDITION_FAILED = 412, "Precondition Failed"
    REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large"
    REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long"
    UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type"
    REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable"
    EXPECTATION_FAILED = 417, "Expectation Failed"
    IM_A_TEAPOT = 418, "I'm a teapot"
    MISDIRECTED_REQUEST = 421, "Misdirected Request"
    UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity"
    LOCKED = 423, "Locked"
    FAILED_DEPENDENCY = 424, "Failed Dependency"
    TOO_EARLY = 425, "Too Early"
    UPGRADE_REQUIRED = 426, "Upgrade Required"
    PRECONDITION_REQUIRED = 428, "Precondition Required"
    TOO_MANY_REQUESTS = 429, "Too Many Requests"
    REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large"
    UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons"

    # server errors
    INTERNAL_SERVER_ERROR = 500, "Internal Server Error"
    NOT_IMPLEMENTED = 501, "Not Implemented"
    BAD_GATEWAY = 502, "Bad Gateway"
    SERVICE_UNAVAILABLE = 503, "Service Unavailable"
    GATEWAY_TIMEOUT = 504, "Gateway Timeout"
    HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported"
    VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates"
    INSUFFICIENT_STORAGE = 507, "Insufficient Storage"
    LOOP_DETECTED = 508, "Loop Detected"
    NOT_EXTENDED = 510, "Not Extended"
    NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required"


# Include lower-case styles for `requests` compatibility.
for member in codes:
    setattr(codes, member._name_.lower(), int(member))
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/httpx/_urlparse.py
ADDED
|
@@ -0,0 +1,527 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
An implementation of `urlparse` that provides URL validation and normalization
|
| 3 |
+
as described by RFC3986.
|
| 4 |
+
|
| 5 |
+
We rely on this implementation rather than the one in Python's stdlib, because:
|
| 6 |
+
|
| 7 |
+
* It provides more complete URL validation.
|
| 8 |
+
* It properly differentiates between an empty querystring and an absent querystring,
|
| 9 |
+
to distinguish URLs with a trailing '?'.
|
| 10 |
+
* It handles scheme, hostname, port, and path normalization.
|
| 11 |
+
* It supports IDNA hostnames, normalizing them to their encoded form.
|
| 12 |
+
* The API supports passing individual components, as well as the complete URL string.
|
| 13 |
+
|
| 14 |
+
Previously we relied on the excellent `rfc3986` package to handle URL parsing and
|
| 15 |
+
validation, but this module provides a simpler alternative, with less indirection
|
| 16 |
+
required.
|
| 17 |
+
"""
|
| 18 |
+
|
| 19 |
+
from __future__ import annotations
|
| 20 |
+
|
| 21 |
+
import ipaddress
|
| 22 |
+
import re
|
| 23 |
+
import typing
|
| 24 |
+
|
| 25 |
+
import idna
|
| 26 |
+
|
| 27 |
+
from ._exceptions import InvalidURL
|
| 28 |
+
|
| 29 |
+
MAX_URL_LENGTH = 65536

# https://datatracker.ietf.org/doc/html/rfc3986.html#section-2.3
UNRESERVED_CHARACTERS = (
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
)
SUB_DELIMS = "!$&'()*+,;="

PERCENT_ENCODED_REGEX = re.compile("%[A-Fa-f0-9]{2}")


def _printable_ascii_excluding(*excluded: int) -> str:
    """Visible ASCII (0x20-0x7E inclusive) minus the given code points."""
    return "".join(chr(i) for i in range(0x20, 0x7F) if i not in excluded)


# https://url.spec.whatwg.org/#percent-encoded-bytes

# The fragment percent-encode set is the C0 control percent-encode set
# and U+0020 SPACE, U+0022 ("), U+003C (<), U+003E (>), and U+0060 (`).
FRAG_SAFE = _printable_ascii_excluding(0x20, 0x22, 0x3C, 0x3E, 0x60)

# The query percent-encode set is the C0 control percent-encode set
# and U+0020 SPACE, U+0022 ("), U+0023 (#), U+003C (<), and U+003E (>).
QUERY_SAFE = _printable_ascii_excluding(0x20, 0x22, 0x23, 0x3C, 0x3E)

# The path percent-encode set is the query percent-encode set
# and U+003F (?), U+0060 (`), U+007B ({), and U+007D (}).
PATH_SAFE = _printable_ascii_excluding(
    0x20, 0x22, 0x23, 0x3C, 0x3E, 0x3F, 0x60, 0x7B, 0x7D
)

# The userinfo percent-encode set is the path percent-encode set
# and U+002F (/), U+003A (:), U+003B (;), U+003D (=), U+0040 (@),
# U+005B ([) to U+005E (^), inclusive, and U+007C (|).
USERNAME_SAFE = _printable_ascii_excluding(
    0x20, 0x22, 0x23, 0x3C, 0x3E, 0x3F, 0x60, 0x7B, 0x7D,
    0x2F, 0x3A, 0x3B, 0x3D, 0x40, 0x5B, 0x5C, 0x5D, 0x5E, 0x7C,
)
# The password safe set is identical to the username safe set.
PASSWORD_SAFE = USERNAME_SAFE
# Note... The terminology 'userinfo' percent-encode set in the WHATWG document
# is used for the username and password quoting. For the joint userinfo
# component we remove U+003A (:) from the safe set.
USERINFO_SAFE = _printable_ascii_excluding(
    0x20, 0x22, 0x23, 0x3C, 0x3E, 0x3F, 0x60, 0x7B, 0x7D,
    0x2F, 0x3B, 0x3D, 0x40, 0x5B, 0x5C, 0x5D, 0x5E, 0x7C,
)


_SCHEME_PATTERN = "([a-zA-Z][a-zA-Z0-9+.-]*)?"

# {scheme}: (optional)
# //{authority} (optional)
# {path}
# ?{query} (optional)
# #{fragment} (optional)
URL_REGEX = re.compile(
    r"(?:(?P<scheme>" + _SCHEME_PATTERN + r"):)?"
    r"(?://(?P<authority>[^/?#]*))?"
    r"(?P<path>[^?#]*)"
    r"(?:\?(?P<query>[^#]*))?"
    r"(?:#(?P<fragment>.*))?"
)

# {userinfo}@ (optional)
# {host}
# :{port} (optional)
AUTHORITY_REGEX = re.compile(
    r"(?:(?P<userinfo>.*)@)?"        # Any character sequence before '@'.
    r"(?P<host>(\[.*\]|[^:@]*))"     # Bracketed IPv6 literal, or any
                                     # sequence excluding ':' and '@'.
    r":?(?P<port>.*)?"               # Any trailing character sequence.
)


# If we call urlparse with an individual component, then we need to regex
# validate that component individually.
# Note that we're duplicating the same patterns as above. Shock! Horror!!
COMPONENT_REGEX = {
    "scheme": re.compile(_SCHEME_PATTERN),
    "authority": re.compile("[^/?#]*"),
    "path": re.compile("[^?#]*"),
    "query": re.compile("[^#]*"),
    "fragment": re.compile(".*"),
    "userinfo": re.compile("[^@]*"),
    "host": re.compile("(\\[.*\\]|[^:]*)"),
    "port": re.compile(".*"),
}


# We use these simple regexes as a first pass before handing off to
# the stdlib 'ipaddress' module for IP address validation.
IPv4_STYLE_HOSTNAME = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$")
IPv6_STYLE_HOSTNAME = re.compile(r"^\[.*\]$")
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
class ParseResult(typing.NamedTuple):
    """The normalized, plain-ASCII components of a parsed URL."""

    scheme: str
    userinfo: str
    host: str
    port: int | None
    path: str
    query: str | None
    fragment: str | None

    @property
    def authority(self) -> str:
        # The authority is the netloc with an optional "userinfo@" prefix.
        userinfo_part = f"{self.userinfo}@" if self.userinfo else ""
        return userinfo_part + self.netloc

    @property
    def netloc(self) -> str:
        # IPv6 hosts are re-bracketed; a None port is simply omitted.
        host_part = f"[{self.host}]" if ":" in self.host else self.host
        port_part = "" if self.port is None else f":{self.port}"
        return host_part + port_part

    def copy_with(self, **kwargs: str | None) -> ParseResult:
        if not kwargs:
            return self

        merged: dict[str, str | None] = {
            "scheme": self.scheme,
            "authority": self.authority,
            "path": self.path,
            "query": self.query,
            "fragment": self.fragment,
        }
        merged.update(kwargs)
        return urlparse("", **merged)

    def __str__(self) -> str:
        # A None query/fragment means no trailing "?"/"#" at all, while an
        # empty string preserves the bare delimiter.
        pieces = [
            f"{self.scheme}:" if self.scheme else "",
            f"//{self.authority}" if self.authority else "",
            self.path,
            "" if self.query is None else f"?{self.query}",
            "" if self.fragment is None else f"#{self.fragment}",
        ]
        return "".join(pieces)
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
def urlparse(url: str = "", **kwargs: str | None) -> ParseResult:
    """
    Parse and normalize a URL string into a `ParseResult`.

    Keyword arguments may supply or override individual components
    ('scheme', 'authority', 'host', 'port', 'path', 'query', 'fragment',
    plus the convenience aliases 'netloc', 'username', 'password', and
    'raw_path'). Raises `InvalidURL` for over-long, non-printable, or
    structurally invalid input.
    """
    # Initial basic checks on allowable URLs.
    # ---------------------------------------

    # Hard limit the maximum allowable URL length.
    if len(url) > MAX_URL_LENGTH:
        raise InvalidURL("URL too long")

    # If a URL includes any ASCII control characters including \t, \r, \n,
    # then treat it as invalid.
    if any(char.isascii() and not char.isprintable() for char in url):
        char = next(char for char in url if char.isascii() and not char.isprintable())
        idx = url.find(char)
        error = (
            f"Invalid non-printable ASCII character in URL, {char!r} at position {idx}."
        )
        raise InvalidURL(error)

    # Some keyword arguments require special handling.
    # ------------------------------------------------

    # Coerce "port" to a string, if it is provided as an integer.
    if "port" in kwargs:
        port = kwargs["port"]
        kwargs["port"] = str(port) if isinstance(port, int) else port

    # Replace "netloc" with "host" and "port".
    if "netloc" in kwargs:
        netloc = kwargs.pop("netloc") or ""
        kwargs["host"], _, kwargs["port"] = netloc.partition(":")

    # Replace "username" and/or "password" with "userinfo".
    if "username" in kwargs or "password" in kwargs:
        username = quote(kwargs.pop("username", "") or "", safe=USERNAME_SAFE)
        password = quote(kwargs.pop("password", "") or "", safe=PASSWORD_SAFE)
        kwargs["userinfo"] = f"{username}:{password}" if password else username

    # Replace "raw_path" with "path" and "query".
    if "raw_path" in kwargs:
        raw_path = kwargs.pop("raw_path") or ""
        kwargs["path"], seperator, kwargs["query"] = raw_path.partition("?")
        # No "?" in raw_path means there is no query component at all.
        if not seperator:
            kwargs["query"] = None

    # Ensure that IPv6 "host" addresses are always escaped with "[...]".
    if "host" in kwargs:
        host = kwargs.get("host") or ""
        if ":" in host and not (host.startswith("[") and host.endswith("]")):
            kwargs["host"] = f"[{host}]"

    # If any keyword arguments are provided, ensure they are valid.
    # -------------------------------------------------------------

    for key, value in kwargs.items():
        if value is not None:
            if len(value) > MAX_URL_LENGTH:
                raise InvalidURL(f"URL component '{key}' too long")

            # If a component includes any ASCII control characters including
            # \t, \r, \n, then treat it as invalid.
            if any(char.isascii() and not char.isprintable() for char in value):
                char = next(
                    char for char in value if char.isascii() and not char.isprintable()
                )
                idx = value.find(char)
                error = (
                    f"Invalid non-printable ASCII character in URL {key} component, "
                    f"{char!r} at position {idx}."
                )
                raise InvalidURL(error)

            # Ensure that keyword arguments match as a valid regex.
            if not COMPONENT_REGEX[key].fullmatch(value):
                raise InvalidURL(f"Invalid URL component '{key}'")

    # The URL_REGEX will always match, but may have empty components.
    url_match = URL_REGEX.match(url)
    assert url_match is not None
    url_dict = url_match.groupdict()

    # * 'scheme', 'authority', and 'path' may be empty strings.
    # * 'query' may be 'None', indicating no trailing "?" portion.
    #   Any string including the empty string, indicates a trailing "?".
    # * 'fragment' may be 'None', indicating no trailing "#" portion.
    #   Any string including the empty string, indicates a trailing "#".
    scheme = kwargs.get("scheme", url_dict["scheme"]) or ""
    authority = kwargs.get("authority", url_dict["authority"]) or ""
    path = kwargs.get("path", url_dict["path"]) or ""
    query = kwargs.get("query", url_dict["query"])
    frag = kwargs.get("fragment", url_dict["fragment"])

    # The AUTHORITY_REGEX will always match, but may have empty components.
    authority_match = AUTHORITY_REGEX.match(authority)
    assert authority_match is not None
    authority_dict = authority_match.groupdict()

    # * 'userinfo' and 'host' may be empty strings.
    # * 'port' may be 'None'.
    userinfo = kwargs.get("userinfo", authority_dict["userinfo"]) or ""
    host = kwargs.get("host", authority_dict["host"]) or ""
    port = kwargs.get("port", authority_dict["port"])

    # Normalize and validate each component.
    # We end up with a parsed representation of the URL,
    # with components that are plain ASCII bytestrings.
    parsed_scheme: str = scheme.lower()
    parsed_userinfo: str = quote(userinfo, safe=USERINFO_SAFE)
    parsed_host: str = encode_host(host)
    parsed_port: int | None = normalize_port(port, scheme)

    has_scheme = parsed_scheme != ""
    has_authority = (
        parsed_userinfo != "" or parsed_host != "" or parsed_port is not None
    )
    validate_path(path, has_scheme=has_scheme, has_authority=has_authority)
    # Only absolute-style URLs get "." / ".." segment normalization.
    if has_scheme or has_authority:
        path = normalize_path(path)

    parsed_path: str = quote(path, safe=PATH_SAFE)
    parsed_query: str | None = None if query is None else quote(query, safe=QUERY_SAFE)
    parsed_frag: str | None = None if frag is None else quote(frag, safe=FRAG_SAFE)

    # The parsed ASCII bytestrings are our canonical form.
    # All properties of the URL are derived from these.
    return ParseResult(
        parsed_scheme,
        parsed_userinfo,
        parsed_host,
        parsed_port,
        parsed_path,
        parsed_query,
        parsed_frag,
    )
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
def encode_host(host: str) -> str:
    """
    Normalize a host component into its canonical ASCII form.

    Handles four shapes of host: the empty string, IPv4 literals,
    bracketed IPv6 literals, plain ASCII registered names, and IDNA
    (internationalized) hostnames. Raises `InvalidURL` when the host
    cannot be validated.
    """
    if not host:
        return ""

    if IPv4_STYLE_HOSTNAME.match(host):
        # Hosts shaped like #.#.#.# must be valid IPv4 addresses.
        #
        # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2
        #
        #   IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
        try:
            ipaddress.IPv4Address(host)
        except ipaddress.AddressValueError:
            raise InvalidURL(f"Invalid IPv4 address: {host!r}")
        return host

    if IPv6_STYLE_HOSTNAME.match(host):
        # Hosts of the form [...] must contain a valid IPv6 address.
        #
        # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2
        #
        # "A host identified by an Internet Protocol literal address, version 6
        # [RFC3513] or later, is distinguished by enclosing the IP literal
        # within square brackets ("[" and "]"). This is the only place where
        # square bracket characters are allowed in the URI syntax."
        try:
            ipaddress.IPv6Address(host[1:-1])
        except ipaddress.AddressValueError:
            raise InvalidURL(f"Invalid IPv6 address: {host!r}")
        # The canonical form drops the surrounding brackets.
        return host[1:-1]

    if host.isascii():
        # Plain ASCII registered names.
        #
        # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2
        #
        #   reg-name = *( unreserved / pct-encoded / sub-delims )
        #
        # A few extra characters are additionally left unescaped,
        # following WHATWG behaviour.
        WHATWG_SAFE = '"`{}%|\\'
        return quote(host.lower(), safe=SUB_DELIMS + WHATWG_SAFE)

    # Anything non-ASCII is treated as an IDNA hostname.
    try:
        return idna.encode(host.lower()).decode("ascii")
    except idna.IDNAError:
        raise InvalidURL(f"Invalid IDNA hostname: {host!r}")
|
| 393 |
+
|
| 394 |
+
|
| 395 |
+
def normalize_port(port: str | int | None, scheme: str) -> int | None:
|
| 396 |
+
# From https://tools.ietf.org/html/rfc3986#section-3.2.3
|
| 397 |
+
#
|
| 398 |
+
# "A scheme may define a default port. For example, the "http" scheme
|
| 399 |
+
# defines a default port of "80", corresponding to its reserved TCP
|
| 400 |
+
# port number. The type of port designated by the port number (e.g.,
|
| 401 |
+
# TCP, UDP, SCTP) is defined by the URI scheme. URI producers and
|
| 402 |
+
# normalizers should omit the port component and its ":" delimiter if
|
| 403 |
+
# port is empty or if its value would be the same as that of the
|
| 404 |
+
# scheme's default."
|
| 405 |
+
if port is None or port == "":
|
| 406 |
+
return None
|
| 407 |
+
|
| 408 |
+
try:
|
| 409 |
+
port_as_int = int(port)
|
| 410 |
+
except ValueError:
|
| 411 |
+
raise InvalidURL(f"Invalid port: {port!r}")
|
| 412 |
+
|
| 413 |
+
# See https://url.spec.whatwg.org/#url-miscellaneous
|
| 414 |
+
default_port = {"ftp": 21, "http": 80, "https": 443, "ws": 80, "wss": 443}.get(
|
| 415 |
+
scheme
|
| 416 |
+
)
|
| 417 |
+
if port_as_int == default_port:
|
| 418 |
+
return None
|
| 419 |
+
return port_as_int
|
| 420 |
+
|
| 421 |
+
|
| 422 |
+
def validate_path(path: str, has_scheme: bool, has_authority: bool) -> None:
    """
    Enforce the path rules of RFC 3986 that depend on whether the URL
    contains a scheme or authority component.

    See https://datatracker.ietf.org/doc/html/rfc3986.html#section-3.3

    Raises `InvalidURL` when the path is not permitted.
    """
    if has_authority and path and not path.startswith("/"):
        # "If a URI contains an authority component, then the path component
        # must either be empty or begin with a slash ("/") character."
        raise InvalidURL("For absolute URLs, path must be empty or begin with '/'")

    if has_scheme or has_authority:
        return

    # From here on we are dealing with a relative reference.
    if path.startswith("//"):
        # "If a URI does not contain an authority component, then the path
        # cannot begin with two slash characters ("//")."
        raise InvalidURL("Relative URLs cannot have a path starting with '//'")

    if path.startswith(":"):
        # "A URI reference (Section 4.1) may be a relative-path reference,
        # in which case the first path segment cannot contain a colon (":")."
        raise InvalidURL("Relative URLs cannot have a path starting with ':'")
|
| 445 |
+
|
| 446 |
+
|
| 447 |
+
def normalize_path(path: str) -> str:
    """
    Remove "." and ".." segments from a URL path.

    For example:

        normalize_path("/path/./to/somewhere/..") == "/path/to"

    Implements the segment-removal logic of RFC 3986 section 5.2.4.
    See https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4
    """
    # Cheap exits: nothing to do unless an actual "." / ".." segment exists.
    if "." not in path:
        return path

    segments = path.split("/")
    if "." not in segments and ".." not in segments:
        return path

    resolved: list[str] = []
    for segment in segments:
        if segment == ".":
            # A "." segment is simply dropped.
            continue
        if segment == "..":
            # ".." pops the previous segment, but never the leading empty
            # segment of an absolute path (and is dropped when there is
            # nothing left to pop).
            if resolved and resolved != [""]:
                resolved.pop()
        else:
            resolved.append(segment)
    return "/".join(resolved)
|
| 476 |
+
|
| 477 |
+
|
| 478 |
+
def PERCENT(string: str) -> str:
    """Percent-encode every byte of the UTF-8 encoding of `string`."""
    encoded = string.encode("utf-8")
    return "".join(f"%{byte:02X}" for byte in encoded)
|
| 480 |
+
|
| 481 |
+
|
| 482 |
+
def percent_encoded(string: str, safe: str) -> str:
    """
    Use percent-encoding to quote a string, leaving unreserved and
    `safe` characters untouched.
    """
    keep = UNRESERVED_CHARACTERS + safe

    # Fast path: if stripping every keepable character leaves nothing,
    # the whole string is already safe and needs no work.
    if not string.rstrip(keep):
        return string

    pieces = []
    for ch in string:
        pieces.append(ch if ch in keep else PERCENT(ch))
    return "".join(pieces)
|
| 495 |
+
|
| 496 |
+
|
| 497 |
+
def quote(string: str, safe: str) -> str:
    """
    Use percent-encoding to quote a string, omitting existing '%xx' escape sequences.

    See: https://www.rfc-editor.org/rfc/rfc3986#section-2.1

    * `string`: The string to be percent-escaped.
    * `safe`: A string containing characters that may be treated as safe, and do not
      need to be escaped. Unreserved characters are always treated as safe.
      See: https://www.rfc-editor.org/rfc/rfc3986#section-2.3
    """
    escaped_parts = []
    cursor = 0
    for escape in re.finditer(PERCENT_ENCODED_REGEX, string):
        # Encode the plain text preceding this existing escape sequence...
        if escape.start() > cursor:
            escaped_parts.append(
                percent_encoded(string[cursor : escape.start()], safe=safe)
            )
        # ...then pass the '%xx' escape sequence through unchanged.
        escaped_parts.append(escape.group(0))
        cursor = escape.end()

    # Encode whatever remains after the final escape sequence.
    if cursor < len(string):
        escaped_parts.append(percent_encoded(string[cursor:], safe=safe))

    return "".join(escaped_parts)
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1.81 kB). View file
|
|
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/cache.cpython-310.pyc
ADDED
|
Binary file (4 kB). View file
|
|
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/common.cpython-310.pyc
ADDED
|
Binary file (1.04 kB). View file
|
|
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/file_io.cpython-310.pyc
ADDED
|
Binary file (3.93 kB). View file
|
|
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/parser_utils.cpython-310.pyc
ADDED
|
Binary file (9.2 kB). View file
|
|
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/settings.cpython-310.pyc
ADDED
|
Binary file (1.82 kB). View file
|
|
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/__pycache__/utils.cpython-310.pyc
ADDED
|
Binary file (4.56 kB). View file
|
|
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/classes.py
ADDED
|
@@ -0,0 +1,895 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
There are a couple of classes documented in here:
|
| 3 |
+
|
| 4 |
+
- :class:`.BaseName` as an abstact base class for almost everything.
|
| 5 |
+
- :class:`.Name` used in a lot of places
|
| 6 |
+
- :class:`.Completion` for completions
|
| 7 |
+
- :class:`.BaseSignature` as a base class for signatures
|
| 8 |
+
- :class:`.Signature` for :meth:`.Script.get_signatures` only
|
| 9 |
+
- :class:`.ParamName` used for parameters of signatures
|
| 10 |
+
- :class:`.Refactoring` for refactorings
|
| 11 |
+
- :class:`.SyntaxError` for :meth:`.Script.get_syntax_errors` only
|
| 12 |
+
|
| 13 |
+
These classes are the much biggest part of the API, because they contain
|
| 14 |
+
the interesting information about all operations.
|
| 15 |
+
"""
|
| 16 |
+
import re
|
| 17 |
+
from pathlib import Path
|
| 18 |
+
from typing import Optional
|
| 19 |
+
|
| 20 |
+
from parso.tree import search_ancestor
|
| 21 |
+
|
| 22 |
+
from jedi import settings
|
| 23 |
+
from jedi import debug
|
| 24 |
+
from jedi.inference.utils import unite
|
| 25 |
+
from jedi.cache import memoize_method
|
| 26 |
+
from jedi.inference.compiled.mixed import MixedName
|
| 27 |
+
from jedi.inference.names import ImportName, SubModuleName
|
| 28 |
+
from jedi.inference.gradual.stub_value import StubModuleValue
|
| 29 |
+
from jedi.inference.gradual.conversion import convert_names, convert_values
|
| 30 |
+
from jedi.inference.base_value import ValueSet, HasNoContext
|
| 31 |
+
from jedi.api.keywords import KeywordName
|
| 32 |
+
from jedi.api import completion_cache
|
| 33 |
+
from jedi.api.helpers import filter_follow_imports
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def _sort_names_by_start_pos(names):
|
| 37 |
+
return sorted(names, key=lambda s: s.start_pos or (0, 0))
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def defined_names(inference_state, value):
    """
    List sub-definitions (e.g., methods in class).

    :type scope: Scope
    :rtype: list of Name
    """
    try:
        context = value.as_context()
    except HasNoContext:
        # Some values cannot provide a context; they define no names.
        return []
    # Only the first filter is relevant here: it holds the names defined
    # directly in this context. (Renamed from `filter`, which shadowed
    # the builtin of the same name.)
    first_filter = next(context.get_filters())
    names = list(first_filter.values())
    return [Name(inference_state, n) for n in _sort_names_by_start_pos(names)]
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def _values_to_definitions(values):
    """Wrap each inferred value's name in a :class:`Name` API object."""
    definitions = []
    for value in values:
        definitions.append(Name(value.inference_state, value.name))
    return definitions
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class BaseName:
|
| 61 |
+
"""
|
| 62 |
+
The base class for all definitions, completions and signatures.
|
| 63 |
+
"""
|
| 64 |
+
_mapping = {
|
| 65 |
+
'posixpath': 'os.path',
|
| 66 |
+
'riscospath': 'os.path',
|
| 67 |
+
'ntpath': 'os.path',
|
| 68 |
+
'os2emxpath': 'os.path',
|
| 69 |
+
'macpath': 'os.path',
|
| 70 |
+
'genericpath': 'os.path',
|
| 71 |
+
'posix': 'os',
|
| 72 |
+
'_io': 'io',
|
| 73 |
+
'_functools': 'functools',
|
| 74 |
+
'_collections': 'collections',
|
| 75 |
+
'_socket': 'socket',
|
| 76 |
+
'_sqlite3': 'sqlite3',
|
| 77 |
+
}
|
| 78 |
+
|
| 79 |
+
_tuple_mapping = dict((tuple(k.split('.')), v) for (k, v) in {
|
| 80 |
+
'argparse._ActionsContainer': 'argparse.ArgumentParser',
|
| 81 |
+
}.items())
|
| 82 |
+
|
| 83 |
+
def __init__(self, inference_state, name):
|
| 84 |
+
self._inference_state = inference_state
|
| 85 |
+
self._name = name
|
| 86 |
+
"""
|
| 87 |
+
An instance of :class:`parso.python.tree.Name` subclass.
|
| 88 |
+
"""
|
| 89 |
+
self.is_keyword = isinstance(self._name, KeywordName)
|
| 90 |
+
|
| 91 |
+
@memoize_method
|
| 92 |
+
def _get_module_context(self):
|
| 93 |
+
# This can take a while to complete, because in the worst case of
|
| 94 |
+
# imports (consider `import a` completions), we need to load all
|
| 95 |
+
# modules starting with a first.
|
| 96 |
+
return self._name.get_root_context()
|
| 97 |
+
|
| 98 |
+
@property
|
| 99 |
+
def module_path(self) -> Optional[Path]:
|
| 100 |
+
"""
|
| 101 |
+
Shows the file path of a module. e.g. ``/usr/lib/python3.9/os.py``
|
| 102 |
+
"""
|
| 103 |
+
module = self._get_module_context()
|
| 104 |
+
if module.is_stub() or not module.is_compiled():
|
| 105 |
+
# Compiled modules should not return a module path even if they
|
| 106 |
+
# have one.
|
| 107 |
+
path: Optional[Path] = self._get_module_context().py__file__()
|
| 108 |
+
return path
|
| 109 |
+
|
| 110 |
+
return None
|
| 111 |
+
|
| 112 |
+
@property
|
| 113 |
+
def name(self):
|
| 114 |
+
"""
|
| 115 |
+
Name of variable/function/class/module.
|
| 116 |
+
|
| 117 |
+
For example, for ``x = None`` it returns ``'x'``.
|
| 118 |
+
|
| 119 |
+
:rtype: str or None
|
| 120 |
+
"""
|
| 121 |
+
return self._name.get_public_name()
|
| 122 |
+
|
| 123 |
+
@property
|
| 124 |
+
def type(self):
|
| 125 |
+
"""
|
| 126 |
+
The type of the definition.
|
| 127 |
+
|
| 128 |
+
Here is an example of the value of this attribute. Let's consider
|
| 129 |
+
the following source. As what is in ``variable`` is unambiguous
|
| 130 |
+
to Jedi, :meth:`jedi.Script.infer` should return a list of
|
| 131 |
+
definition for ``sys``, ``f``, ``C`` and ``x``.
|
| 132 |
+
|
| 133 |
+
>>> from jedi import Script
|
| 134 |
+
>>> source = '''
|
| 135 |
+
... import keyword
|
| 136 |
+
...
|
| 137 |
+
... class C:
|
| 138 |
+
... pass
|
| 139 |
+
...
|
| 140 |
+
... class D:
|
| 141 |
+
... pass
|
| 142 |
+
...
|
| 143 |
+
... x = D()
|
| 144 |
+
...
|
| 145 |
+
... def f():
|
| 146 |
+
... pass
|
| 147 |
+
...
|
| 148 |
+
... for variable in [keyword, f, C, x]:
|
| 149 |
+
... variable'''
|
| 150 |
+
|
| 151 |
+
>>> script = Script(source)
|
| 152 |
+
>>> defs = script.infer()
|
| 153 |
+
|
| 154 |
+
Before showing what is in ``defs``, let's sort it by :attr:`line`
|
| 155 |
+
so that it is easy to relate the result to the source code.
|
| 156 |
+
|
| 157 |
+
>>> defs = sorted(defs, key=lambda d: d.line)
|
| 158 |
+
>>> print(defs) # doctest: +NORMALIZE_WHITESPACE
|
| 159 |
+
[<Name full_name='keyword', description='module keyword'>,
|
| 160 |
+
<Name full_name='__main__.C', description='class C'>,
|
| 161 |
+
<Name full_name='__main__.D', description='instance D'>,
|
| 162 |
+
<Name full_name='__main__.f', description='def f'>]
|
| 163 |
+
|
| 164 |
+
Finally, here is what you can get from :attr:`type`:
|
| 165 |
+
|
| 166 |
+
>>> defs = [d.type for d in defs]
|
| 167 |
+
>>> defs[0]
|
| 168 |
+
'module'
|
| 169 |
+
>>> defs[1]
|
| 170 |
+
'class'
|
| 171 |
+
>>> defs[2]
|
| 172 |
+
'instance'
|
| 173 |
+
>>> defs[3]
|
| 174 |
+
'function'
|
| 175 |
+
|
| 176 |
+
Valid values for type are ``module``, ``class``, ``instance``, ``function``,
|
| 177 |
+
``param``, ``path``, ``keyword``, ``property`` and ``statement``.
|
| 178 |
+
|
| 179 |
+
"""
|
| 180 |
+
tree_name = self._name.tree_name
|
| 181 |
+
resolve = False
|
| 182 |
+
if tree_name is not None:
|
| 183 |
+
# TODO move this to their respective names.
|
| 184 |
+
definition = tree_name.get_definition()
|
| 185 |
+
if definition is not None and definition.type == 'import_from' and \
|
| 186 |
+
tree_name.is_definition():
|
| 187 |
+
resolve = True
|
| 188 |
+
|
| 189 |
+
if isinstance(self._name, SubModuleName) or resolve:
|
| 190 |
+
for value in self._name.infer():
|
| 191 |
+
return value.api_type
|
| 192 |
+
return self._name.api_type
|
| 193 |
+
|
| 194 |
+
@property
|
| 195 |
+
def module_name(self):
|
| 196 |
+
"""
|
| 197 |
+
The module name, a bit similar to what ``__name__`` is in a random
|
| 198 |
+
Python module.
|
| 199 |
+
|
| 200 |
+
>>> from jedi import Script
|
| 201 |
+
>>> source = 'import json'
|
| 202 |
+
>>> script = Script(source, path='example.py')
|
| 203 |
+
>>> d = script.infer()[0]
|
| 204 |
+
>>> print(d.module_name) # doctest: +ELLIPSIS
|
| 205 |
+
json
|
| 206 |
+
"""
|
| 207 |
+
return self._get_module_context().py__name__()
|
| 208 |
+
|
| 209 |
+
def in_builtin_module(self):
|
| 210 |
+
"""
|
| 211 |
+
Returns True, if this is a builtin module.
|
| 212 |
+
"""
|
| 213 |
+
value = self._get_module_context().get_value()
|
| 214 |
+
if isinstance(value, StubModuleValue):
|
| 215 |
+
return any(v.is_compiled() for v in value.non_stub_value_set)
|
| 216 |
+
return value.is_compiled()
|
| 217 |
+
|
| 218 |
+
@property
|
| 219 |
+
def line(self):
|
| 220 |
+
"""The line where the definition occurs (starting with 1)."""
|
| 221 |
+
start_pos = self._name.start_pos
|
| 222 |
+
if start_pos is None:
|
| 223 |
+
return None
|
| 224 |
+
return start_pos[0]
|
| 225 |
+
|
| 226 |
+
@property
|
| 227 |
+
def column(self):
|
| 228 |
+
"""The column where the definition occurs (starting with 0)."""
|
| 229 |
+
start_pos = self._name.start_pos
|
| 230 |
+
if start_pos is None:
|
| 231 |
+
return None
|
| 232 |
+
return start_pos[1]
|
| 233 |
+
|
| 234 |
+
def get_definition_start_position(self):
|
| 235 |
+
"""
|
| 236 |
+
The (row, column) of the start of the definition range. Rows start with
|
| 237 |
+
1, columns start with 0.
|
| 238 |
+
|
| 239 |
+
:rtype: Optional[Tuple[int, int]]
|
| 240 |
+
"""
|
| 241 |
+
if self._name.tree_name is None:
|
| 242 |
+
return None
|
| 243 |
+
definition = self._name.tree_name.get_definition()
|
| 244 |
+
if definition is None:
|
| 245 |
+
return self._name.start_pos
|
| 246 |
+
return definition.start_pos
|
| 247 |
+
|
| 248 |
+
def get_definition_end_position(self):
|
| 249 |
+
"""
|
| 250 |
+
The (row, column) of the end of the definition range. Rows start with
|
| 251 |
+
1, columns start with 0.
|
| 252 |
+
|
| 253 |
+
:rtype: Optional[Tuple[int, int]]
|
| 254 |
+
"""
|
| 255 |
+
if self._name.tree_name is None:
|
| 256 |
+
return None
|
| 257 |
+
definition = self._name.tree_name.get_definition()
|
| 258 |
+
if definition is None:
|
| 259 |
+
return self._name.tree_name.end_pos
|
| 260 |
+
if self.type in ("function", "class"):
|
| 261 |
+
last_leaf = definition.get_last_leaf()
|
| 262 |
+
if last_leaf.type == "newline":
|
| 263 |
+
return last_leaf.get_previous_leaf().end_pos
|
| 264 |
+
return last_leaf.end_pos
|
| 265 |
+
return definition.end_pos
|
| 266 |
+
|
| 267 |
+
def docstring(self, raw=False, fast=True):
|
| 268 |
+
r"""
|
| 269 |
+
Return a document string for this completion object.
|
| 270 |
+
|
| 271 |
+
Example:
|
| 272 |
+
|
| 273 |
+
>>> from jedi import Script
|
| 274 |
+
>>> source = '''\
|
| 275 |
+
... def f(a, b=1):
|
| 276 |
+
... "Document for function f."
|
| 277 |
+
... '''
|
| 278 |
+
>>> script = Script(source, path='example.py')
|
| 279 |
+
>>> doc = script.infer(1, len('def f'))[0].docstring()
|
| 280 |
+
>>> print(doc)
|
| 281 |
+
f(a, b=1)
|
| 282 |
+
<BLANKLINE>
|
| 283 |
+
Document for function f.
|
| 284 |
+
|
| 285 |
+
Notice that useful extra information is added to the actual
|
| 286 |
+
docstring, e.g. function signatures are prepended to their docstrings.
|
| 287 |
+
If you need the actual docstring, use ``raw=True`` instead.
|
| 288 |
+
|
| 289 |
+
>>> print(script.infer(1, len('def f'))[0].docstring(raw=True))
|
| 290 |
+
Document for function f.
|
| 291 |
+
|
| 292 |
+
:param fast: Don't follow imports that are only one level deep like
|
| 293 |
+
``import foo``, but follow ``from foo import bar``. This makes
|
| 294 |
+
sense for speed reasons. Completing `import a` is slow if you use
|
| 295 |
+
the ``foo.docstring(fast=False)`` on every object, because it
|
| 296 |
+
parses all libraries starting with ``a``.
|
| 297 |
+
"""
|
| 298 |
+
if isinstance(self._name, ImportName) and fast:
|
| 299 |
+
return ''
|
| 300 |
+
doc = self._get_docstring()
|
| 301 |
+
if raw:
|
| 302 |
+
return doc
|
| 303 |
+
|
| 304 |
+
signature_text = self._get_docstring_signature()
|
| 305 |
+
if signature_text and doc:
|
| 306 |
+
return signature_text + '\n\n' + doc
|
| 307 |
+
else:
|
| 308 |
+
return signature_text + doc
|
| 309 |
+
|
| 310 |
+
def _get_docstring(self):
|
| 311 |
+
return self._name.py__doc__()
|
| 312 |
+
|
| 313 |
+
def _get_docstring_signature(self):
|
| 314 |
+
return '\n'.join(
|
| 315 |
+
signature.to_string()
|
| 316 |
+
for signature in self._get_signatures(for_docstring=True)
|
| 317 |
+
)
|
| 318 |
+
|
| 319 |
+
@property
|
| 320 |
+
def description(self):
|
| 321 |
+
"""
|
| 322 |
+
A description of the :class:`.Name` object, which is heavily used
|
| 323 |
+
in testing. e.g. for ``isinstance`` it returns ``def isinstance``.
|
| 324 |
+
|
| 325 |
+
Example:
|
| 326 |
+
|
| 327 |
+
>>> from jedi import Script
|
| 328 |
+
>>> source = '''
|
| 329 |
+
... def f():
|
| 330 |
+
... pass
|
| 331 |
+
...
|
| 332 |
+
... class C:
|
| 333 |
+
... pass
|
| 334 |
+
...
|
| 335 |
+
... variable = f if random.choice([0,1]) else C'''
|
| 336 |
+
>>> script = Script(source) # line is maximum by default
|
| 337 |
+
>>> defs = script.infer(column=3)
|
| 338 |
+
>>> defs = sorted(defs, key=lambda d: d.line)
|
| 339 |
+
>>> print(defs) # doctest: +NORMALIZE_WHITESPACE
|
| 340 |
+
[<Name full_name='__main__.f', description='def f'>,
|
| 341 |
+
<Name full_name='__main__.C', description='class C'>]
|
| 342 |
+
>>> str(defs[0].description)
|
| 343 |
+
'def f'
|
| 344 |
+
>>> str(defs[1].description)
|
| 345 |
+
'class C'
|
| 346 |
+
|
| 347 |
+
"""
|
| 348 |
+
typ = self.type
|
| 349 |
+
tree_name = self._name.tree_name
|
| 350 |
+
if typ == 'param':
|
| 351 |
+
return typ + ' ' + self._name.to_string()
|
| 352 |
+
if typ in ('function', 'class', 'module', 'instance') or tree_name is None:
|
| 353 |
+
if typ == 'function':
|
| 354 |
+
# For the description we want a short and a pythonic way.
|
| 355 |
+
typ = 'def'
|
| 356 |
+
return typ + ' ' + self._name.get_public_name()
|
| 357 |
+
|
| 358 |
+
definition = tree_name.get_definition(include_setitem=True) or tree_name
|
| 359 |
+
# Remove the prefix, because that's not what we want for get_code
|
| 360 |
+
# here.
|
| 361 |
+
txt = definition.get_code(include_prefix=False)
|
| 362 |
+
# Delete comments:
|
| 363 |
+
txt = re.sub(r'#[^\n]+\n', ' ', txt)
|
| 364 |
+
# Delete multi spaces/newlines
|
| 365 |
+
txt = re.sub(r'\s+', ' ', txt).strip()
|
| 366 |
+
return txt
|
| 367 |
+
|
| 368 |
+
@property
|
| 369 |
+
def full_name(self):
|
| 370 |
+
"""
|
| 371 |
+
Dot-separated path of this object.
|
| 372 |
+
|
| 373 |
+
It is in the form of ``<module>[.<submodule>[...]][.<object>]``.
|
| 374 |
+
It is useful when you want to look up Python manual of the
|
| 375 |
+
object at hand.
|
| 376 |
+
|
| 377 |
+
Example:
|
| 378 |
+
|
| 379 |
+
>>> from jedi import Script
|
| 380 |
+
>>> source = '''
|
| 381 |
+
... import os
|
| 382 |
+
... os.path.join'''
|
| 383 |
+
>>> script = Script(source, path='example.py')
|
| 384 |
+
>>> print(script.infer(3, len('os.path.join'))[0].full_name)
|
| 385 |
+
os.path.join
|
| 386 |
+
|
| 387 |
+
Notice that it returns ``'os.path.join'`` instead of (for example)
|
| 388 |
+
``'posixpath.join'``. This is not correct, since the modules name would
|
| 389 |
+
be ``<module 'posixpath' ...>```. However most users find the latter
|
| 390 |
+
more practical.
|
| 391 |
+
"""
|
| 392 |
+
if not self._name.is_value_name:
|
| 393 |
+
return None
|
| 394 |
+
|
| 395 |
+
names = self._name.get_qualified_names(include_module_names=True)
|
| 396 |
+
if names is None:
|
| 397 |
+
return None
|
| 398 |
+
|
| 399 |
+
names = list(names)
|
| 400 |
+
try:
|
| 401 |
+
names[0] = self._mapping[names[0]]
|
| 402 |
+
except KeyError:
|
| 403 |
+
pass
|
| 404 |
+
|
| 405 |
+
return '.'.join(names)
|
| 406 |
+
|
| 407 |
+
def is_stub(self):
|
| 408 |
+
"""
|
| 409 |
+
Returns True if the current name is defined in a stub file.
|
| 410 |
+
"""
|
| 411 |
+
if not self._name.is_value_name:
|
| 412 |
+
return False
|
| 413 |
+
|
| 414 |
+
return self._name.get_root_context().is_stub()
|
| 415 |
+
|
| 416 |
+
def is_side_effect(self):
|
| 417 |
+
"""
|
| 418 |
+
Checks if a name is defined as ``self.foo = 3``. In case of self, this
|
| 419 |
+
function would return False, for foo it would return True.
|
| 420 |
+
"""
|
| 421 |
+
tree_name = self._name.tree_name
|
| 422 |
+
if tree_name is None:
|
| 423 |
+
return False
|
| 424 |
+
return tree_name.is_definition() and tree_name.parent.type == 'trailer'
|
| 425 |
+
|
| 426 |
+
@debug.increase_indent_cm('goto on name')
|
| 427 |
+
def goto(self, *, follow_imports=False, follow_builtin_imports=False,
|
| 428 |
+
only_stubs=False, prefer_stubs=False):
|
| 429 |
+
|
| 430 |
+
"""
|
| 431 |
+
Like :meth:`.Script.goto` (also supports the same params), but does it
|
| 432 |
+
for the current name. This is typically useful if you are using
|
| 433 |
+
something like :meth:`.Script.get_names()`.
|
| 434 |
+
|
| 435 |
+
:param follow_imports: The goto call will follow imports.
|
| 436 |
+
:param follow_builtin_imports: If follow_imports is True will try to
|
| 437 |
+
look up names in builtins (i.e. compiled or extension modules).
|
| 438 |
+
:param only_stubs: Only return stubs for this goto call.
|
| 439 |
+
:param prefer_stubs: Prefer stubs to Python objects for this goto call.
|
| 440 |
+
:rtype: list of :class:`Name`
|
| 441 |
+
"""
|
| 442 |
+
if not self._name.is_value_name:
|
| 443 |
+
return []
|
| 444 |
+
|
| 445 |
+
names = self._name.goto()
|
| 446 |
+
if follow_imports:
|
| 447 |
+
names = filter_follow_imports(names, follow_builtin_imports)
|
| 448 |
+
names = convert_names(
|
| 449 |
+
names,
|
| 450 |
+
only_stubs=only_stubs,
|
| 451 |
+
prefer_stubs=prefer_stubs,
|
| 452 |
+
)
|
| 453 |
+
return [self if n == self._name else Name(self._inference_state, n)
|
| 454 |
+
for n in names]
|
| 455 |
+
|
| 456 |
+
    @debug.increase_indent_cm('infer on name')
    def infer(self, *, only_stubs=False, prefer_stubs=False):
        """
        Like :meth:`.Script.infer`, it can be useful to understand which type
        the current name has.

        Return the actual definitions. I strongly recommend not using it for
        your completions, because it might slow down |jedi|. If you want to
        read only a few objects (<=20), it might be useful, especially to get
        the original docstrings. The basic problem of this function is that it
        follows all results. This means with 1000 completions (e.g. numpy),
        it's just very, very slow.

        :param only_stubs: Only return stubs for this goto call.
        :param prefer_stubs: Prefer stubs to Python objects for this type
            inference call.
        :rtype: list of :class:`Name`
        """
        # The two stub options are mutually exclusive.
        assert not (only_stubs and prefer_stubs)

        if not self._name.is_value_name:
            return []

        # First we need to make sure that we have stub names (if possible) that
        # we can follow. If we don't do that, we can end up with the inferred
        # results of Python objects instead of stubs.
        names = convert_names([self._name], prefer_stubs=True)
        values = convert_values(
            ValueSet.from_sets(n.infer() for n in names),
            only_stubs=only_stubs,
            prefer_stubs=prefer_stubs,
        )
        resulting_names = [c.name for c in values]
        # Keep object identity when inference returns the same name again.
        return [self if n == self._name else Name(self._inference_state, n)
                for n in resulting_names]
|
| 491 |
+
|
| 492 |
+
    def parent(self):
        """
        Returns the parent scope of this identifier.

        Returns ``None`` for names without a value or without any
        enclosing named context.

        :rtype: Name
        """
        if not self._name.is_value_name:
            return None

        if self.type in ('function', 'class', 'param') and self._name.tree_name is not None:
            # Since the parent_context doesn't really match what the user
            # thinks of that the parent is here, we do these cases separately.
            # The reason for this is the following:
            # - class: Nested classes parent_context is always the
            #   parent_context of the most outer one.
            # - function: Functions in classes have the module as
            #   parent_context.
            # - param: The parent_context of a param is not its function but
            #   e.g. the outer class or module.
            cls_or_func_node = self._name.tree_name.get_definition()
            parent = search_ancestor(cls_or_func_node, 'funcdef', 'classdef', 'file_input')
            context = self._get_module_context().create_value(parent).as_context()
        else:
            context = self._name.parent_context

        if context is None:
            return None
        while context.name is None:
            # Happens for comprehension contexts
            context = context.parent_context

        return Name(self._inference_state, context.name)
|
| 524 |
+
|
| 525 |
+
def __repr__(self):
|
| 526 |
+
return "<%s %sname=%r, description=%r>" % (
|
| 527 |
+
self.__class__.__name__,
|
| 528 |
+
'full_' if self.full_name else '',
|
| 529 |
+
self.full_name or self.name,
|
| 530 |
+
self.description,
|
| 531 |
+
)
|
| 532 |
+
|
| 533 |
+
def get_line_code(self, before=0, after=0):
|
| 534 |
+
"""
|
| 535 |
+
Returns the line of code where this object was defined.
|
| 536 |
+
|
| 537 |
+
:param before: Add n lines before the current line to the output.
|
| 538 |
+
:param after: Add n lines after the current line to the output.
|
| 539 |
+
|
| 540 |
+
:return str: Returns the line(s) of code or an empty string if it's a
|
| 541 |
+
builtin.
|
| 542 |
+
"""
|
| 543 |
+
if not self._name.is_value_name:
|
| 544 |
+
return ''
|
| 545 |
+
|
| 546 |
+
lines = self._name.get_root_context().code_lines
|
| 547 |
+
if lines is None:
|
| 548 |
+
# Probably a builtin module, just ignore in that case.
|
| 549 |
+
return ''
|
| 550 |
+
|
| 551 |
+
index = self._name.start_pos[0] - 1
|
| 552 |
+
start_index = max(index - before, 0)
|
| 553 |
+
return ''.join(lines[start_index:index + after + 1])
|
| 554 |
+
|
| 555 |
+
    def _get_signatures(self, for_docstring=False):
        # Properties are accessed like attributes, so they have no call
        # signature.
        if self._name.api_type == 'property':
            return []
        if for_docstring and self._name.api_type == 'statement' and not self.is_stub():
            # For docstrings we don't resolve signatures if they are simple
            # statements and not stubs. This is a speed optimization.
            return []

        if isinstance(self._name, MixedName):
            # While this would eventually happen anyway, it's basically just a
            # shortcut to not infer anything tree related, because it's really
            # not necessary.
            return self._name.infer_compiled_value().get_signatures()

        # Prefer stub names so that ``@overload`` definitions are found.
        names = convert_names([self._name], prefer_stubs=True)
        return [sig for name in names for sig in name.infer().get_signatures()]
|
| 571 |
+
|
| 572 |
+
def get_signatures(self):
|
| 573 |
+
"""
|
| 574 |
+
Returns all potential signatures for a function or a class. Multiple
|
| 575 |
+
signatures are typical if you use Python stubs with ``@overload``.
|
| 576 |
+
|
| 577 |
+
:rtype: list of :class:`BaseSignature`
|
| 578 |
+
"""
|
| 579 |
+
return [
|
| 580 |
+
BaseSignature(self._inference_state, s)
|
| 581 |
+
for s in self._get_signatures()
|
| 582 |
+
]
|
| 583 |
+
|
| 584 |
+
def execute(self):
|
| 585 |
+
"""
|
| 586 |
+
Uses type inference to "execute" this identifier and returns the
|
| 587 |
+
executed objects.
|
| 588 |
+
|
| 589 |
+
:rtype: list of :class:`Name`
|
| 590 |
+
"""
|
| 591 |
+
return _values_to_definitions(self._name.infer().execute_with_values())
|
| 592 |
+
|
| 593 |
+
def get_type_hint(self):
|
| 594 |
+
"""
|
| 595 |
+
Returns type hints like ``Iterable[int]`` or ``Union[int, str]``.
|
| 596 |
+
|
| 597 |
+
This method might be quite slow, especially for functions. The problem
|
| 598 |
+
is finding executions for those functions to return something like
|
| 599 |
+
``Callable[[int, str], str]``.
|
| 600 |
+
|
| 601 |
+
:rtype: str
|
| 602 |
+
"""
|
| 603 |
+
return self._name.infer().get_type_hint()
|
| 604 |
+
|
| 605 |
+
|
| 606 |
+
class Completion(BaseName):
    """
    ``Completion`` objects are returned from :meth:`.Script.complete`. They
    provide additional information about a completion.
    """
    def __init__(self, inference_state, name, stack, like_name_length,
                 is_fuzzy, cached_name=None):
        super().__init__(inference_state, name)

        # Number of characters of this name the user has already typed.
        self._like_name_length = like_name_length
        # Parser stack at the completion position (may be None).
        self._stack = stack
        # True if this completion was produced by fuzzy matching.
        self._is_fuzzy = is_fuzzy
        # Optional key into the completion cache for type/docstring lookups.
        self._cached_name = cached_name

        # Completion objects with the same Completion name (which means
        # duplicate items in the completion)
        self._same_name_completions = []

    def _complete(self, like_name):
        # Build the text to insert; optionally trailing ``(`` for functions.
        append = ''
        if settings.add_bracket_after_function \
                and self.type == 'function':
            append = '('

        name = self._name.get_public_name()
        if like_name:
            # Return only the part the user has not typed yet.
            name = name[self._like_name_length:]
        return name + append

    @property
    def complete(self):
        """
        Only works with non-fuzzy completions. Returns None if fuzzy
        completions are used.

        Return the rest of the word, e.g. completing ``isinstance``::

            isinstan# <-- Cursor is here

        would return the string 'ce'. It also adds additional stuff, depending
        on your ``settings.py``.

        Assuming the following function definition::

            def foo(param=0):
                pass

        completing ``foo(par`` would give a ``Completion`` which ``complete``
        would be ``am=``.
        """
        if self._is_fuzzy:
            return None
        return self._complete(True)

    @property
    def name_with_symbols(self):
        """
        Similar to :attr:`.name`, but like :attr:`.name` returns also the
        symbols, for example assuming the following function definition::

            def foo(param=0):
                pass

        completing ``foo(`` would give a ``Completion`` which
        ``name_with_symbols`` would be "param=".

        """
        return self._complete(False)

    def docstring(self, raw=False, fast=True):
        """
        Documented under :meth:`BaseName.docstring`.
        """
        if self._like_name_length >= 3:
            # In this case we can just resolve the like name, because we
            # wouldn't load like > 100 Python modules anymore.
            fast = False

        return super().docstring(raw=raw, fast=fast)

    def _get_docstring(self):
        # Serve from the completion cache when possible; avoids re-inferring
        # the name just to render its docstring.
        if self._cached_name is not None:
            return completion_cache.get_docstring(
                self._cached_name,
                self._name.get_public_name(),
                lambda: self._get_cache()
            )
        return super()._get_docstring()

    def _get_docstring_signature(self):
        # Same caching strategy as _get_docstring.
        if self._cached_name is not None:
            return completion_cache.get_docstring_signature(
                self._cached_name,
                self._name.get_public_name(),
                lambda: self._get_cache()
            )
        return super()._get_docstring_signature()

    def _get_cache(self):
        # Tuple of everything the completion cache stores for one name.
        return (
            super().type,
            super()._get_docstring_signature(),
            super()._get_docstring(),
        )

    @property
    def type(self):
        """
        Documented under :meth:`BaseName.type`.
        """
        # Purely a speed optimization.
        if self._cached_name is not None:
            return completion_cache.get_type(
                self._cached_name,
                self._name.get_public_name(),
                lambda: self._get_cache()
            )

        return super().type

    def get_completion_prefix_length(self):
        """
        Returns the length of the prefix being completed.
        For example, completing ``isinstance``::

            isinstan# <-- Cursor is here

        would return 8, because len('isinstan') == 8.

        Assuming the following function definition::

            def foo(param=0):
                pass

        completing ``foo(par`` would return 3.
        """
        return self._like_name_length

    def __repr__(self):
        return '<%s: %s>' % (type(self).__name__, self._name.get_public_name())
|
| 746 |
+
|
| 747 |
+
|
| 748 |
+
class Name(BaseName):
    """
    *Name* objects are returned from many different APIs including
    :meth:`.Script.goto` or :meth:`.Script.infer`.
    """
    def __init__(self, inference_state, definition):
        super().__init__(inference_state, definition)

    @memoize_method
    def defined_names(self):
        """
        List sub-definitions (e.g., methods in class).

        :rtype: list of :class:`Name`
        """
        defs = self._name.infer()
        # Sort by source position; names without a position sort first.
        return sorted(
            unite(defined_names(self._inference_state, d) for d in defs),
            key=lambda s: s._name.start_pos or (0, 0)
        )

    def is_definition(self):
        """
        Returns True, if defined as a name in a statement, function or class.
        Returns False, if it's a reference to such a definition.
        """
        if self._name.tree_name is None:
            return True
        else:
            return self._name.tree_name.is_definition()

    def __eq__(self, other):
        # Bug fix: comparing against a non-Name object (e.g.
        # ``name == "foo"``) used to raise AttributeError when accessing
        # ``other._name``.  Returning NotImplemented lets Python fall back
        # to the other operand / default identity comparison instead.
        if not isinstance(other, Name):
            return NotImplemented
        return self._name.start_pos == other._name.start_pos \
            and self.module_path == other.module_path \
            and self.name == other.name \
            and self._inference_state == other._inference_state

    def __ne__(self, other):
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def __hash__(self):
        return hash((self._name.start_pos, self.module_path, self.name,
                     self._inference_state))
|
| 790 |
+
|
| 791 |
+
|
| 792 |
+
class BaseSignature(Name):
    """
    These signatures are returned by :meth:`BaseName.get_signatures`
    calls.
    """
    def __init__(self, inference_state, signature):
        super().__init__(inference_state, signature.name)
        # Keep the underlying signature object for param/string rendering.
        self._signature = signature

    @property
    def params(self):
        """
        Returns definitions for all parameters that a signature defines.
        This includes stuff like ``*args`` and ``**kwargs``.

        :rtype: list of :class:`.ParamName`
        """
        param_names = self._signature.get_param_names(resolve_stars=True)
        return [ParamName(self._inference_state, param_name)
                for param_name in param_names]

    def to_string(self):
        """
        Returns a text representation of the signature. This could for example
        look like ``foo(bar, baz: int, **kwargs)``.

        :rtype: str
        """
        return self._signature.to_string()
|
| 820 |
+
|
| 821 |
+
|
| 822 |
+
class Signature(BaseSignature):
    """
    A full signature object is the return value of
    :meth:`.Script.get_signatures`.
    """
    def __init__(self, inference_state, signature, call_details):
        super().__init__(inference_state, signature)
        # Details about the call the cursor currently sits in (bracket
        # position, arguments written so far).
        self._call_details = call_details
        self._signature = signature

    @property
    def index(self):
        """
        Returns the param index of the current cursor position.
        Returns None if the index cannot be found in the current call.

        :rtype: int
        """
        return self._call_details.calculate_index(
            self._signature.get_param_names(resolve_stars=True)
        )

    @property
    def bracket_start(self):
        """
        Returns a line/column tuple of the bracket that is responsible for the
        last function call. The first line is 1 and the first column 0.

        :rtype: int, int
        """
        return self._call_details.bracket_leaf.start_pos

    def __repr__(self):
        return '<%s: index=%r %s>' % (
            type(self).__name__,
            self.index,
            self._signature.to_string(),
        )
|
| 860 |
+
|
| 861 |
+
|
| 862 |
+
class ParamName(Name):
    """A :class:`Name` for a single function/lambda parameter."""

    def infer_default(self):
        """
        Returns default values like the ``1`` of ``def foo(x=1):``.

        :rtype: list of :class:`.Name`
        """
        return _values_to_definitions(self._name.infer_default())

    def infer_annotation(self, **kwargs):
        """
        :param execute_annotation: Default True; If False, values are not
            executed and classes are returned instead of instances.
        :rtype: list of :class:`.Name`
        """
        return _values_to_definitions(self._name.infer_annotation(ignore_stars=True, **kwargs))

    def to_string(self):
        """
        Returns a simple representation of a param, like
        ``f: Callable[..., Any]``.

        :rtype: str
        """
        return self._name.to_string()

    @property
    def kind(self):
        """
        Returns an enum instance of :mod:`inspect`'s ``Parameter`` enum.

        :rtype: :py:attr:`inspect.Parameter.kind`
        """
        return self._name.get_kind()
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/completion.py
ADDED
|
@@ -0,0 +1,696 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
from textwrap import dedent
|
| 3 |
+
from inspect import Parameter
|
| 4 |
+
|
| 5 |
+
from parso.python.token import PythonTokenTypes
|
| 6 |
+
from parso.python import tree
|
| 7 |
+
from parso.tree import search_ancestor, Leaf
|
| 8 |
+
from parso import split_lines
|
| 9 |
+
|
| 10 |
+
from jedi import debug
|
| 11 |
+
from jedi import settings
|
| 12 |
+
from jedi.api import classes
|
| 13 |
+
from jedi.api import helpers
|
| 14 |
+
from jedi.api import keywords
|
| 15 |
+
from jedi.api.strings import complete_dict
|
| 16 |
+
from jedi.api.file_name import complete_file_name
|
| 17 |
+
from jedi.inference import imports
|
| 18 |
+
from jedi.inference.base_value import ValueSet
|
| 19 |
+
from jedi.inference.helpers import infer_call_of_leaf, parse_dotted_names
|
| 20 |
+
from jedi.inference.context import get_global_filters
|
| 21 |
+
from jedi.inference.value import TreeInstance
|
| 22 |
+
from jedi.inference.docstring_utils import DocstringModule
|
| 23 |
+
from jedi.inference.names import ParamNameWrapper, SubModuleName
|
| 24 |
+
from jedi.inference.gradual.conversion import convert_values, convert_names
|
| 25 |
+
from jedi.parser_utils import cut_value_at_position
|
| 26 |
+
from jedi.plugins import plugin_manager
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class ParamNameWithEquals(ParamNameWrapper):
    """A param name rendered with a trailing ``=`` for keyword completions."""

    def get_public_name(self):
        # ``foo`` becomes ``foo=`` so accepting the completion immediately
        # starts a keyword argument.
        return self.string_name + '='
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def _get_signature_param_names(signatures, positional_count, used_kwargs):
    """
    Yield ``name=`` completions for parameters that can still be passed as
    keyword arguments at the current call position.
    """
    # Add named params
    for call_sig in signatures:
        for i, p in enumerate(call_sig.params):
            kind = p.kind
            if i < positional_count and kind == Parameter.POSITIONAL_OR_KEYWORD:
                # Already filled by a positional argument; offering
                # ``name=`` would be redundant.
                continue
            if kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY) \
                    and p.name not in used_kwargs:
                yield ParamNameWithEquals(p._name)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def _must_be_kwarg(signatures, positional_count, used_kwargs):
|
| 47 |
+
if used_kwargs:
|
| 48 |
+
return True
|
| 49 |
+
|
| 50 |
+
must_be_kwarg = True
|
| 51 |
+
for signature in signatures:
|
| 52 |
+
for i, p in enumerate(signature.params):
|
| 53 |
+
kind = p.kind
|
| 54 |
+
if kind is Parameter.VAR_POSITIONAL:
|
| 55 |
+
# In case there were not already kwargs, the next param can
|
| 56 |
+
# always be a normal argument.
|
| 57 |
+
return False
|
| 58 |
+
|
| 59 |
+
if i >= positional_count and kind in (Parameter.POSITIONAL_OR_KEYWORD,
|
| 60 |
+
Parameter.POSITIONAL_ONLY):
|
| 61 |
+
must_be_kwarg = False
|
| 62 |
+
break
|
| 63 |
+
if not must_be_kwarg:
|
| 64 |
+
break
|
| 65 |
+
return must_be_kwarg
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def filter_names(inference_state, completion_names, stack, like_name, fuzzy,
                 imported_names, cached_name):
    """
    Yield :class:`classes.Completion` objects for every candidate name that
    matches ``like_name``, skipping duplicates and already-imported names.
    """
    # Tracks (name, complete) pairs already yielded, to de-duplicate.
    comp_dct = set()
    if settings.case_insensitive_completion:
        like_name = like_name.lower()
    for name in completion_names:
        string = name.string_name
        if string in imported_names and string != like_name:
            # Don't suggest names already listed in the current import
            # statement (unless the user typed the name out fully).
            continue
        if settings.case_insensitive_completion:
            string = string.lower()
        if helpers.match(string, like_name, fuzzy=fuzzy):
            new = classes.Completion(
                inference_state,
                name,
                stack,
                len(like_name),
                is_fuzzy=fuzzy,
                cached_name=cached_name,
            )
            k = (new.name, new.complete)  # key
            if k not in comp_dct:
                comp_dct.add(k)
                tree_name = name.tree_name
                if tree_name is not None:
                    definition = tree_name.get_definition()
                    if definition is not None and definition.type == 'del_stmt':
                        # Names removed via ``del`` are not valid completions.
                        continue
                yield new
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def _remove_duplicates(completions, other_completions):
|
| 100 |
+
names = {d.name for d in other_completions}
|
| 101 |
+
return [c for c in completions if c.name not in names]
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def get_user_context(module_context, position):
    """
    Returns the scope in which the user resides. This includes flows.
    """
    # include_prefixes=True so that a position inside whitespace/comments
    # still maps onto the following leaf.
    leaf = module_context.tree_node.get_leaf_for_position(position, include_prefixes=True)
    return module_context.create_context(leaf)
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def get_flow_scope_node(module_node, position):
    """Return the closest scope or flow node enclosing *position*."""
    current = module_node.get_leaf_for_position(position, include_prefixes=True)
    # Walk up the parse tree until we hit a scope (function/class/module)
    # or a flow construct (if/for/while/try/with).
    while not isinstance(current, (tree.Scope, tree.Flow)):
        current = current.parent

    return current
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
@plugin_manager.decorate()
def complete_param_names(context, function_name, decorator_nodes):
    """Plugin hook for parameter-name completion; default is no results."""
    # Basically there's no way to do param completion. The plugins are
    # responsible for this.
    return []
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
class Completion:
|
| 128 |
+
def __init__(self, inference_state, module_context, code_lines, position,
|
| 129 |
+
signatures_callback, fuzzy=False):
|
| 130 |
+
self._inference_state = inference_state
|
| 131 |
+
self._module_context = module_context
|
| 132 |
+
self._module_node = module_context.tree_node
|
| 133 |
+
self._code_lines = code_lines
|
| 134 |
+
|
| 135 |
+
# The first step of completions is to get the name
|
| 136 |
+
self._like_name = helpers.get_on_completion_name(self._module_node, code_lines, position)
|
| 137 |
+
# The actual cursor position is not what we need to calculate
|
| 138 |
+
# everything. We want the start of the name we're on.
|
| 139 |
+
self._original_position = position
|
| 140 |
+
self._signatures_callback = signatures_callback
|
| 141 |
+
|
| 142 |
+
self._fuzzy = fuzzy
|
| 143 |
+
|
| 144 |
+
# Return list of completions in this order:
|
| 145 |
+
# - Beginning with what user is typing
|
| 146 |
+
# - Public (alphabet)
|
| 147 |
+
# - Private ("_xxx")
|
| 148 |
+
# - Dunder ("__xxx")
|
| 149 |
+
def complete(self):
|
| 150 |
+
leaf = self._module_node.get_leaf_for_position(
|
| 151 |
+
self._original_position,
|
| 152 |
+
include_prefixes=True
|
| 153 |
+
)
|
| 154 |
+
string, start_leaf, quote = _extract_string_while_in_string(leaf, self._original_position)
|
| 155 |
+
|
| 156 |
+
prefixed_completions = complete_dict(
|
| 157 |
+
self._module_context,
|
| 158 |
+
self._code_lines,
|
| 159 |
+
start_leaf or leaf,
|
| 160 |
+
self._original_position,
|
| 161 |
+
None if string is None else quote + string,
|
| 162 |
+
fuzzy=self._fuzzy,
|
| 163 |
+
)
|
| 164 |
+
|
| 165 |
+
if string is not None and not prefixed_completions:
|
| 166 |
+
prefixed_completions = list(complete_file_name(
|
| 167 |
+
self._inference_state, self._module_context, start_leaf, quote, string,
|
| 168 |
+
self._like_name, self._signatures_callback,
|
| 169 |
+
self._code_lines, self._original_position,
|
| 170 |
+
self._fuzzy
|
| 171 |
+
))
|
| 172 |
+
if string is not None:
|
| 173 |
+
if not prefixed_completions and '\n' in string:
|
| 174 |
+
# Complete only multi line strings
|
| 175 |
+
prefixed_completions = self._complete_in_string(start_leaf, string)
|
| 176 |
+
return prefixed_completions
|
| 177 |
+
|
| 178 |
+
cached_name, completion_names = self._complete_python(leaf)
|
| 179 |
+
|
| 180 |
+
imported_names = []
|
| 181 |
+
if leaf.parent is not None and leaf.parent.type in ['import_as_names', 'dotted_as_names']:
|
| 182 |
+
imported_names.extend(extract_imported_names(leaf.parent))
|
| 183 |
+
|
| 184 |
+
completions = list(filter_names(self._inference_state, completion_names,
|
| 185 |
+
self.stack, self._like_name,
|
| 186 |
+
self._fuzzy, imported_names, cached_name=cached_name))
|
| 187 |
+
|
| 188 |
+
return (
|
| 189 |
+
# Removing duplicates mostly to remove False/True/None duplicates.
|
| 190 |
+
_remove_duplicates(prefixed_completions, completions)
|
| 191 |
+
+ sorted(completions, key=lambda x: (not x.name.startswith(self._like_name),
|
| 192 |
+
x.name.startswith('__'),
|
| 193 |
+
x.name.startswith('_'),
|
| 194 |
+
x.name.lower()))
|
| 195 |
+
)
|
| 196 |
+
|
| 197 |
+
def _complete_python(self, leaf):
|
| 198 |
+
"""
|
| 199 |
+
Analyzes the current context of a completion and decides what to
|
| 200 |
+
return.
|
| 201 |
+
|
| 202 |
+
Technically this works by generating a parser stack and analysing the
|
| 203 |
+
current stack for possible grammar nodes.
|
| 204 |
+
|
| 205 |
+
Possible enhancements:
|
| 206 |
+
- global/nonlocal search global
|
| 207 |
+
- yield from / raise from <- could be only exceptions/generators
|
| 208 |
+
- In args: */**: no completion
|
| 209 |
+
- In params (also lambda): no completion before =
|
| 210 |
+
"""
|
| 211 |
+
grammar = self._inference_state.grammar
|
| 212 |
+
self.stack = stack = None
|
| 213 |
+
self._position = (
|
| 214 |
+
self._original_position[0],
|
| 215 |
+
self._original_position[1] - len(self._like_name)
|
| 216 |
+
)
|
| 217 |
+
cached_name = None
|
| 218 |
+
|
| 219 |
+
try:
|
| 220 |
+
self.stack = stack = helpers.get_stack_at_position(
|
| 221 |
+
grammar, self._code_lines, leaf, self._position
|
| 222 |
+
)
|
| 223 |
+
except helpers.OnErrorLeaf as e:
|
| 224 |
+
value = e.error_leaf.value
|
| 225 |
+
if value == '.':
|
| 226 |
+
# After ErrorLeaf's that are dots, we will not do any
|
| 227 |
+
# completions since this probably just confuses the user.
|
| 228 |
+
return cached_name, []
|
| 229 |
+
|
| 230 |
+
# If we don't have a value, just use global completion.
|
| 231 |
+
return cached_name, self._complete_global_scope()
|
| 232 |
+
|
| 233 |
+
allowed_transitions = \
|
| 234 |
+
list(stack._allowed_transition_names_and_token_types())
|
| 235 |
+
|
| 236 |
+
if 'if' in allowed_transitions:
|
| 237 |
+
leaf = self._module_node.get_leaf_for_position(self._position, include_prefixes=True)
|
| 238 |
+
previous_leaf = leaf.get_previous_leaf()
|
| 239 |
+
|
| 240 |
+
indent = self._position[1]
|
| 241 |
+
if not (leaf.start_pos <= self._position <= leaf.end_pos):
|
| 242 |
+
indent = leaf.start_pos[1]
|
| 243 |
+
|
| 244 |
+
if previous_leaf is not None:
|
| 245 |
+
stmt = previous_leaf
|
| 246 |
+
while True:
|
| 247 |
+
stmt = search_ancestor(
|
| 248 |
+
stmt, 'if_stmt', 'for_stmt', 'while_stmt', 'try_stmt',
|
| 249 |
+
'error_node',
|
| 250 |
+
)
|
| 251 |
+
if stmt is None:
|
| 252 |
+
break
|
| 253 |
+
|
| 254 |
+
type_ = stmt.type
|
| 255 |
+
if type_ == 'error_node':
|
| 256 |
+
first = stmt.children[0]
|
| 257 |
+
if isinstance(first, Leaf):
|
| 258 |
+
type_ = first.value + '_stmt'
|
| 259 |
+
# Compare indents
|
| 260 |
+
if stmt.start_pos[1] == indent:
|
| 261 |
+
if type_ == 'if_stmt':
|
| 262 |
+
allowed_transitions += ['elif', 'else']
|
| 263 |
+
elif type_ == 'try_stmt':
|
| 264 |
+
allowed_transitions += ['except', 'finally', 'else']
|
| 265 |
+
elif type_ == 'for_stmt':
|
| 266 |
+
allowed_transitions.append('else')
|
| 267 |
+
|
| 268 |
+
completion_names = []
|
| 269 |
+
|
| 270 |
+
kwargs_only = False
|
| 271 |
+
if any(t in allowed_transitions for t in (PythonTokenTypes.NAME,
|
| 272 |
+
PythonTokenTypes.INDENT)):
|
| 273 |
+
# This means that we actually have to do type inference.
|
| 274 |
+
|
| 275 |
+
nonterminals = [stack_node.nonterminal for stack_node in stack]
|
| 276 |
+
|
| 277 |
+
nodes = _gather_nodes(stack)
|
| 278 |
+
if nodes and nodes[-1] in ('as', 'def', 'class'):
|
| 279 |
+
# No completions for ``with x as foo`` and ``import x as foo``.
|
| 280 |
+
# Also true for defining names as a class or function.
|
| 281 |
+
return cached_name, list(self._complete_inherited(is_function=True))
|
| 282 |
+
elif "import_stmt" in nonterminals:
|
| 283 |
+
level, names = parse_dotted_names(nodes, "import_from" in nonterminals)
|
| 284 |
+
|
| 285 |
+
only_modules = not ("import_from" in nonterminals and 'import' in nodes)
|
| 286 |
+
completion_names += self._get_importer_names(
|
| 287 |
+
names,
|
| 288 |
+
level,
|
| 289 |
+
only_modules=only_modules,
|
| 290 |
+
)
|
| 291 |
+
elif nonterminals[-1] in ('trailer', 'dotted_name') and nodes[-1] == '.':
|
| 292 |
+
dot = self._module_node.get_leaf_for_position(self._position)
|
| 293 |
+
if dot.type == "endmarker":
|
| 294 |
+
# This is a bit of a weird edge case, maybe we can somehow
|
| 295 |
+
# generalize this.
|
| 296 |
+
dot = leaf.get_previous_leaf()
|
| 297 |
+
cached_name, n = self._complete_trailer(dot.get_previous_leaf())
|
| 298 |
+
completion_names += n
|
| 299 |
+
elif self._is_parameter_completion():
|
| 300 |
+
completion_names += self._complete_params(leaf)
|
| 301 |
+
else:
|
| 302 |
+
# Apparently this looks like it's good enough to filter most cases
|
| 303 |
+
# so that signature completions don't randomly appear.
|
| 304 |
+
# To understand why this works, three things are important:
|
| 305 |
+
# 1. trailer with a `,` in it is either a subscript or an arglist.
|
| 306 |
+
# 2. If there's no `,`, it's at the start and only signatures start
|
| 307 |
+
# with `(`. Other trailers could start with `.` or `[`.
|
| 308 |
+
# 3. Decorators are very primitive and have an optional `(` with
|
| 309 |
+
# optional arglist in them.
|
| 310 |
+
if nodes[-1] in ['(', ','] \
|
| 311 |
+
and nonterminals[-1] in ('trailer', 'arglist', 'decorator'):
|
| 312 |
+
signatures = self._signatures_callback(*self._position)
|
| 313 |
+
if signatures:
|
| 314 |
+
call_details = signatures[0]._call_details
|
| 315 |
+
used_kwargs = list(call_details.iter_used_keyword_arguments())
|
| 316 |
+
positional_count = call_details.count_positional_arguments()
|
| 317 |
+
|
| 318 |
+
completion_names += _get_signature_param_names(
|
| 319 |
+
signatures,
|
| 320 |
+
positional_count,
|
| 321 |
+
used_kwargs,
|
| 322 |
+
)
|
| 323 |
+
|
| 324 |
+
kwargs_only = _must_be_kwarg(signatures, positional_count, used_kwargs)
|
| 325 |
+
|
| 326 |
+
if not kwargs_only:
|
| 327 |
+
completion_names += self._complete_global_scope()
|
| 328 |
+
completion_names += self._complete_inherited(is_function=False)
|
| 329 |
+
|
| 330 |
+
if not kwargs_only:
|
| 331 |
+
current_line = self._code_lines[self._position[0] - 1][:self._position[1]]
|
| 332 |
+
completion_names += self._complete_keywords(
|
| 333 |
+
allowed_transitions,
|
| 334 |
+
only_values=not (not current_line or current_line[-1] in ' \t.;'
|
| 335 |
+
and current_line[-3:] != '...')
|
| 336 |
+
)
|
| 337 |
+
|
| 338 |
+
return cached_name, completion_names
|
| 339 |
+
|
| 340 |
+
def _is_parameter_completion(self):
|
| 341 |
+
tos = self.stack[-1]
|
| 342 |
+
if tos.nonterminal == 'lambdef' and len(tos.nodes) == 1:
|
| 343 |
+
# We are at the position `lambda `, where basically the next node
|
| 344 |
+
# is a param.
|
| 345 |
+
return True
|
| 346 |
+
if tos.nonterminal in 'parameters':
|
| 347 |
+
# Basically we are at the position `foo(`, there's nothing there
|
| 348 |
+
# yet, so we have no `typedargslist`.
|
| 349 |
+
return True
|
| 350 |
+
# var args is for lambdas and typed args for normal functions
|
| 351 |
+
return tos.nonterminal in ('typedargslist', 'varargslist') and tos.nodes[-1] == ','
|
| 352 |
+
|
| 353 |
+
def _complete_params(self, leaf):
    """Complete parameter names inside a function signature."""
    stack_node = self.stack[-2]
    if stack_node.nonterminal == 'parameters':
        stack_node = self.stack[-3]
    if stack_node.nonterminal != 'funcdef':
        return []

    context = get_user_context(self._module_context, self._position)
    node = search_ancestor(leaf, 'error_node', 'funcdef')
    if node is None:
        return []

    if node.type == 'error_node':
        first_child = node.children[0]
        if first_child.type == 'decorators':
            decorators = first_child.children
        elif first_child.type == 'decorator':
            decorators = [first_child]
        else:
            decorators = []
    else:
        decorators = node.get_decorators()

    function_name = stack_node.nodes[1]
    return complete_param_names(context, function_name.value, decorators)
|
| 375 |
+
|
| 376 |
+
def _complete_keywords(self, allowed_transitions, only_values):
    # Yield a KeywordName for every purely-alphabetic keyword transition.
    # When only_values is true, restrict to the value-like keywords.
    for transition in allowed_transitions:
        if not (isinstance(transition, str) and transition.isalpha()):
            continue
        if only_values and transition not in ('True', 'False', 'None'):
            continue
        yield keywords.KeywordName(self._inference_state, transition)
|
| 381 |
+
|
| 382 |
+
def _complete_global_scope(self):
    """Collect all names visible from the user's position in the module."""
    context = get_user_context(self._module_context, self._position)
    debug.dbg('global completion scope: %s', context)
    flow_scope_node = get_flow_scope_node(self._module_node, self._position)
    global_filters = get_global_filters(
        context,
        self._position,
        flow_scope_node,
    )
    return [name
            for value_filter in global_filters
            for name in value_filter.values()]
|
| 395 |
+
|
| 396 |
+
def _complete_trailer(self, previous_leaf):
    """Complete after a trailer, e.g. the names reachable behind ``foo.``."""
    inferred_context = self._module_context.create_context(previous_leaf)
    values = infer_call_of_leaf(inferred_context, previous_leaf)
    debug.dbg('trailer completion values: %s', values, color='MAGENTA')

    # The cached name simply exists to make speed optimizations for certain
    # modules.
    cached_name = None
    if len(values) == 1:
        value, = values
        if value.is_module() and len(value.string_names) == 1:
            first_name = value.string_names[0]
            if first_name in ('numpy', 'tensorflow', 'matplotlib', 'pandas'):
                cached_name = first_name

    return cached_name, self._complete_trailer_for_values(values)
|
| 413 |
+
|
| 414 |
+
def _complete_trailer_for_values(self, values):
    # Delegate to the module-level helper, using the context at the cursor.
    return complete_trailer(
        get_user_context(self._module_context, self._position),
        values,
    )
|
| 418 |
+
|
| 419 |
+
def _get_importer_names(self, names, level=0, only_modules=True):
    # Map the name leaves to their string values and ask the importer for
    # matching completion names.
    string_names = [name.value for name in names]
    importer = imports.Importer(
        self._inference_state, string_names, self._module_context, level)
    return importer.completion_names(self._inference_state, only_modules=only_modules)
|
| 423 |
+
|
| 424 |
+
def _complete_inherited(self, is_function=True):
    """
    Autocomplete inherited methods when overriding in child class.
    """
    leaf = self._module_node.get_leaf_for_position(self._position, include_prefixes=True)
    enclosing_class = tree.search_ancestor(leaf, 'classdef')
    if enclosing_class is None:
        return

    # Complete the methods that are defined in the super classes.
    class_value = self._module_context.create_value(enclosing_class)

    # Only complete when the cursor is indented past the class header.
    if enclosing_class.start_pos[1] >= leaf.start_pos[1]:
        return

    filters = class_value.get_filters(is_instance=True)
    # The first dict is the dictionary of class itself.
    next(filters)
    for value_filter in filters:
        for name in value_filter.values():
            # TODO we should probably check here for properties
            if (name.api_type == 'function') == is_function:
                yield name
|
| 447 |
+
|
| 448 |
+
def _complete_in_string(self, start_leaf, string):
    """
    To make it possible for people to have completions in doctests or
    generally in "Python" code in docstrings, we use the following
    heuristic:

    - Having an indented block of code
    - Having some doctest code that starts with `>>>`
    - Having backticks that doesn't have whitespace inside it
    """
    def iter_relevant_lines(lines):
        # Yield the code part of each doctest/indented line; yield None for
        # prose lines.
        include_next_line = False
        # Bug fix: this loop previously iterated the closed-over
        # `code_lines` instead of the `lines` parameter, silently shadowing
        # and ignoring the argument.
        for l in lines:
            if include_next_line or l.startswith('>>>') or l.startswith(' '):
                yield re.sub(r'^( *>>> ?| +)', '', l)
            else:
                yield None

            # A `>>>` line pulls its continuation line in as code as well.
            include_next_line = bool(re.match(' *>>>', l))

    string = dedent(string)
    code_lines = split_lines(string, keepends=True)
    relevant_code_lines = list(iter_relevant_lines(code_lines))
    if relevant_code_lines[-1] is not None:
        # Some code lines might be None, therefore get rid of that.
        relevant_code_lines = ['\n' if c is None else c for c in relevant_code_lines]
        return self._complete_code_lines(relevant_code_lines)
    # Fall back to completing a backticked identifier on the last line.
    match = re.search(r'`([^`\s]+)', code_lines[-1])
    if match:
        return self._complete_code_lines([match.group(1)])
    return []
|
| 480 |
+
|
| 481 |
+
def _complete_code_lines(self, code_lines):
    # Parse the extracted snippet as its own module and run a fresh
    # completion on it, anchored at the very end of that snippet.
    module_node = self._inference_state.grammar.parse(''.join(code_lines))
    module_value = DocstringModule(
        in_module_context=self._module_context,
        inference_state=self._inference_state,
        module_node=module_node,
        code_lines=code_lines,
    )
    return Completion(
        self._inference_state,
        module_value.as_context(),
        code_lines=code_lines,
        position=module_node.end_pos,
        # Signatures are irrelevant inside strings/docstrings.
        signatures_callback=lambda *args, **kwargs: [],
        fuzzy=self._fuzzy
    ).complete()
|
| 497 |
+
|
| 498 |
+
|
| 499 |
+
def _gather_nodes(stack):
|
| 500 |
+
nodes = []
|
| 501 |
+
for stack_node in stack:
|
| 502 |
+
if stack_node.dfa.from_rule == 'small_stmt':
|
| 503 |
+
nodes = []
|
| 504 |
+
else:
|
| 505 |
+
nodes += stack_node.nodes
|
| 506 |
+
return nodes
|
| 507 |
+
|
| 508 |
+
|
| 509 |
+
# Matches an optional identifier-like string prefix (e.g. r, b, f) followed by
# the opening quote: ''', """, ' or ".
_string_start = re.compile(r'^\w*(\'{3}|"{3}|\'|")')
|
| 510 |
+
|
| 511 |
+
|
| 512 |
+
def _extract_string_while_in_string(leaf, position):
    """
    If ``position`` lies inside a (possibly unterminated) string, return a
    triple ``(string_contents_up_to_position, leaf, quote_start)``;
    otherwise return ``(None, None, None)``.
    """
    def return_part_of_leaf(leaf):
        # Extract the part of a single string-ish leaf before `position`.
        kwargs = {}
        if leaf.line == position[0]:
            kwargs['endpos'] = position[1] - leaf.column
        match = _string_start.match(leaf.value, **kwargs)
        if not match:
            return None, None, None
        start = match.group(0)
        if leaf.line == position[0] and position[1] < leaf.column + match.end():
            # The cursor is still inside the prefix/quotes themselves.
            return None, None, None
        return cut_value_at_position(leaf, position)[match.end():], leaf, start

    if position < leaf.start_pos:
        return None, None, None

    if leaf.type == 'string':
        return return_part_of_leaf(leaf)

    # Walk backwards over leaves on the same line, looking for the error leaf
    # that opened an unterminated string.
    leaves = []
    while leaf is not None:
        if leaf.type == 'error_leaf' and ('"' in leaf.value or "'" in leaf.value):
            if len(leaf.value) > 1:
                return return_part_of_leaf(leaf)
            # A single-quote-character error leaf: check whether the previous
            # leaf is a string prefix such as r/u/b/f.
            prefix_leaf = None
            if not leaf.prefix:
                prefix_leaf = leaf.get_previous_leaf()
                if prefix_leaf is None or prefix_leaf.type != 'name' \
                        or not all(c in 'rubf' for c in prefix_leaf.value.lower()):
                    prefix_leaf = None

            return (
                ''.join(cut_value_at_position(l, position) for l in leaves),
                prefix_leaf or leaf,
                ('' if prefix_leaf is None else prefix_leaf.value)
                + cut_value_at_position(leaf, position),
            )
        if leaf.line != position[0]:
            # Multi line strings are always simple error leaves and contain the
            # whole string, single line error leaves are therefore important
            # now and since the line is different, it's not really a single
            # line string anymore.
            break
        leaves.insert(0, leaf)
        leaf = leaf.get_previous_leaf()
    return None, None, None
|
| 558 |
+
|
| 559 |
+
|
| 560 |
+
def complete_trailer(user_context, values):
    """Gather completion names reachable via attribute access on ``values``."""
    names = []
    origin = user_context.tree_node
    for value in values:
        for value_filter in value.get_filters(origin_scope=origin):
            names.extend(value_filter.values())

        # Try the __getattr__ proxy heuristic for non-stub tree instances.
        if not value.is_stub() and isinstance(value, TreeInstance):
            names.extend(_complete_getattr(user_context, value))

    # Also include names from converted counterparts of the values.
    for converted in convert_values(values):
        if converted not in values:
            for value_filter in converted.get_filters(origin_scope=origin):
                names.extend(value_filter.values())
    return names
|
| 575 |
+
|
| 576 |
+
|
| 577 |
+
def _complete_getattr(user_context, instance):
    """
    A heuristic to make completion for proxy objects work. This is not
    intended to work in all cases. It works exactly in this case:

        def __getattr__(self, name):
            ...
            return getattr(any_object, name)

    It is important that the return contains getattr directly, otherwise it
    won't work anymore. It's really just a stupid heuristic. It will not
    work if you write e.g. `return (getattr(o, name))`, because of the
    additional parentheses. It will also not work if you move the getattr
    to some other place that is not the return statement itself.

    It is intentional that it doesn't work in all cases. Generally it's
    really hard to do even this case (as you can see below). Most people
    will write it like this anyway and the other ones, well they are just
    out of luck I guess :) ~dave.
    """
    names = (instance.get_function_slot_names('__getattr__')
             or instance.get_function_slot_names('__getattribute__'))
    functions = ValueSet.from_sets(
        name.infer()
        for name in names
    )
    for func in functions:
        tree_node = func.tree_node
        if tree_node is None or tree_node.type != 'funcdef':
            continue

        for return_stmt in tree_node.iter_return_stmts():
            # Basically until the next comment we just try to find out if a
            # return statement looks exactly like `return getattr(x, name)`.
            if return_stmt.type != 'return_stmt':
                continue
            atom_expr = return_stmt.children[1]
            if atom_expr.type != 'atom_expr':
                continue
            atom = atom_expr.children[0]
            trailer = atom_expr.children[1]
            if len(atom_expr.children) != 2 or atom.type != 'name' \
                    or atom.value != 'getattr':
                continue
            arglist = trailer.children[1]
            if arglist.type != 'arglist' or len(arglist.children) < 3:
                continue
            context = func.as_context()
            object_node = arglist.children[0]

            # Make sure it's a param: foo in __getattr__(self, foo)
            name_node = arglist.children[2]
            name_list = context.goto(name_node, name_node.start_pos)
            if not any(n.api_type == 'param' for n in name_list):
                continue

            # Now that we know that these are most probably completion
            # objects, we just infer the object and return them as
            # completions.
            objects = context.infer_node(object_node)
            return complete_trailer(user_context, objects)
    return []
|
| 639 |
+
|
| 640 |
+
|
| 641 |
+
def search_in_module(inference_state, module_context, names, wanted_names,
                     wanted_type, complete=False, fuzzy=False,
                     ignore_imports=False, convert=False):
    """
    Resolve the dotted path ``wanted_names`` starting from ``names`` and
    yield matching API objects.

    All but the last wanted name are resolved via attribute (trailer)
    completion; the last one is matched either exactly (``complete=False``)
    or as a completion prefix (``complete=True``). Yields
    ``classes.Completion`` or ``classes.Name`` objects, optionally filtered
    by ``wanted_type``.
    """
    # Walk down the dotted path: each intermediate segment replaces `names`
    # with the names reachable behind it.
    for s in wanted_names[:-1]:
        new_names = []
        for n in names:
            if s == n.string_name:
                if n.tree_name is not None and n.api_type in ('module', 'namespace') \
                        and ignore_imports:
                    continue
                new_names += complete_trailer(
                    module_context,
                    n.infer()
                )
        debug.dbg('dot lookup on search %s from %s', new_names, names[:10])
        names = new_names

    # Matching on the final segment is case-insensitive.
    last_name = wanted_names[-1].lower()
    for n in names:
        string = n.string_name.lower()
        if complete and helpers.match(string, last_name, fuzzy=fuzzy) \
                or not complete and string == last_name:
            if isinstance(n, SubModuleName):
                # Expand submodule names to the names of their values.
                names = [v.name for v in n.infer()]
            else:
                names = [n]
            if convert:
                names = convert_names(names)
            for n2 in names:
                if complete:
                    def_ = classes.Completion(
                        inference_state, n2,
                        stack=None,
                        like_name_length=len(last_name),
                        is_fuzzy=fuzzy,
                    )
                else:
                    def_ = classes.Name(inference_state, n2)
                if not wanted_type or wanted_type == def_.type:
                    yield def_
|
| 681 |
+
|
| 682 |
+
|
| 683 |
+
def extract_imported_names(node):
    """
    Collect the name values from an import name-list node.

    A name that directly follows the ``as`` keyword is skipped; nested
    ``import_as_name`` children are processed recursively. Returns an empty
    list for any other node type.
    """
    if node.type not in ('import_as_names', 'dotted_as_names', 'import_as_name'):
        return []

    collected = []
    previous = None
    for child in node.children:
        if child.type == 'name':
            follows_as = (previous is not None
                          and previous.type == "keyword"
                          and previous.value == "as")
            if not follows_as:
                collected.append(child.value)
        elif child.type == 'import_as_name':
            collected.extend(extract_imported_names(child))
        previous = child
    return collected
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/completion_cache.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Dict, Tuple, Callable
|
| 2 |
+
|
| 3 |
+
# One cached triple per completion name: (type, docstring_signature, docstring)
# — see the get_* accessors at the bottom of this module.
CacheValues = Tuple[str, str, str]
# Callable that lazily computes the triple on a cache miss.
CacheValuesCallback = Callable[[], CacheValues]


# module_name -> completion name -> CacheValues
_cache: Dict[str, Dict[str, CacheValues]] = {}
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def save_entry(module_name: str, name: str, cache: CacheValues) -> None:
    """Store the cached values for ``name`` under ``module_name``."""
    _cache.setdefault(module_name, {})[name] = cache
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def _create_get_from_cache(number: int) -> Callable[[str, str, CacheValuesCallback], str]:
    # Build an accessor returning element ``number`` of a module/name cache
    # entry, computing and storing the whole tuple on a miss.
    def _get_from_cache(module_name: str, name: str, get_cache_values: CacheValuesCallback) -> str:
        try:
            return _cache[module_name][name][number]
        except KeyError:
            # Miss: compute all values once, cache them, return the requested one.
            v = get_cache_values()
            save_entry(module_name, name, v)
            return v[number]
    return _get_from_cache
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
# Index-specific accessors into the cached (type, docstring_signature,
# docstring) triples.
get_type = _create_get_from_cache(0)
get_docstring_signature = _create_get_from_cache(1)
get_docstring = _create_get_from_cache(2)
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/environment.py
ADDED
|
@@ -0,0 +1,480 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Environments are a way to activate different Python versions or Virtualenvs for
|
| 3 |
+
static analysis. The Python binary in that environment is going to be executed.
|
| 4 |
+
"""
|
| 5 |
+
import os
|
| 6 |
+
import sys
|
| 7 |
+
import hashlib
|
| 8 |
+
import filecmp
|
| 9 |
+
from collections import namedtuple
|
| 10 |
+
from shutil import which
|
| 11 |
+
from typing import TYPE_CHECKING
|
| 12 |
+
|
| 13 |
+
from jedi.cache import memoize_method, time_cache
|
| 14 |
+
from jedi.inference.compiled.subprocess import CompiledSubprocess, \
|
| 15 |
+
InferenceStateSameProcess, InferenceStateSubprocess
|
| 16 |
+
|
| 17 |
+
import parso
|
| 18 |
+
|
| 19 |
+
if TYPE_CHECKING:
|
| 20 |
+
from jedi.inference import InferenceState
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
# Mirrors the first three fields of sys.version_info.
_VersionInfo = namedtuple('VersionInfo', 'major minor micro')  # type: ignore[name-match]

# Python versions searched for on the system, newest first.
_SUPPORTED_PYTHONS = ['3.13', '3.12', '3.11', '3.10', '3.9', '3.8', '3.7', '3.6']
# Directories considered trustworthy when `safe=True` searching is used.
_SAFE_PATHS = ['/usr/bin', '/usr/local/bin']
# Environment variable pointing at an active conda environment.
_CONDA_VAR = 'CONDA_PREFIX'
# "major.minor" of the interpreter Jedi itself runs in.
_CURRENT_VERSION = '%s.%s' % (sys.version_info.major, sys.version_info.minor)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class InvalidPythonEnvironment(Exception):
    """
    If you see this exception, the Python executable or Virtualenv you have
    been trying to use is probably not a correct Python version.

    Raised e.g. when the executable cannot be started or queried for its
    version information.
    """
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class _BaseEnvironment:
    # Shared behavior for Environment and InterpreterEnvironment.

    @memoize_method
    def get_grammar(self):
        # Parso grammar matching this environment's Python version.
        version_string = '%s.%s' % (self.version_info.major, self.version_info.minor)
        return parso.load_grammar(version=version_string)

    @property
    def _sha256(self):
        # Lazily computed, instance-cached SHA-256 of self.executable.
        try:
            return self._hash
        except AttributeError:
            self._hash = _calculate_sha256_for_file(self.executable)
            return self._hash
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def _get_info():
|
| 54 |
+
return (
|
| 55 |
+
sys.executable,
|
| 56 |
+
sys.prefix,
|
| 57 |
+
sys.version_info[:3],
|
| 58 |
+
)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class Environment(_BaseEnvironment):
    """
    This class is supposed to be created by internal Jedi architecture. You
    should not create it directly. Please use create_environment or the other
    functions instead. It is then returned by that function.
    """
    # Lazily created CompiledSubprocess; recreated in _get_subprocess if it
    # has crashed.
    _subprocess = None

    def __init__(self, executable, env_vars=None):
        self._start_executable = executable
        self._env_vars = env_vars
        # Initialize the environment
        self._get_subprocess()

    def _get_subprocess(self):
        # Reuse the existing subprocess unless it crashed.
        if self._subprocess is not None and not self._subprocess.is_crashed:
            return self._subprocess

        try:
            self._subprocess = CompiledSubprocess(self._start_executable,
                                                  env_vars=self._env_vars)
            # Ask the target interpreter for (executable, prefix, version).
            info = self._subprocess._send(None, _get_info)
        except Exception as exc:
            raise InvalidPythonEnvironment(
                "Could not get version information for %r: %r" % (
                    self._start_executable,
                    exc))

        # Since it could change and might not be the same(?) as the one given,
        # set it here.
        self.executable = info[0]
        """
        The Python executable, matches ``sys.executable``.
        """
        self.path = info[1]
        """
        The path to an environment, matches ``sys.prefix``.
        """
        self.version_info = _VersionInfo(*info[2])
        """
        Like :data:`sys.version_info`: a tuple to show the current
        Environment's Python version.
        """
        return self._subprocess

    def __repr__(self):
        version = '.'.join(str(i) for i in self.version_info)
        return '<%s: %s in %s>' % (self.__class__.__name__, version, self.path)

    def get_inference_state_subprocess(
        self,
        inference_state: 'InferenceState',
    ) -> InferenceStateSubprocess:
        return InferenceStateSubprocess(inference_state, self._get_subprocess())

    @memoize_method
    def get_sys_path(self):
        """
        The sys path for this environment. Does not include potential
        modifications from e.g. appending to :data:`sys.path`.

        :returns: list of str
        """
        # It's pretty much impossible to generate the sys path without actually
        # executing Python. The sys path (when starting with -S) itself depends
        # on how the Python version was compiled (ENV variables).
        # If you omit -S when starting Python (normal case), additionally
        # site.py gets executed.
        return self._get_subprocess().get_sys_path()
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
class _SameEnvironmentMixin:
    # Fills in the environment attributes from the currently running
    # interpreter instead of querying a subprocess.
    def __init__(self):
        self._start_executable = self.executable = sys.executable
        self.path = sys.prefix
        self.version_info = _VersionInfo(*sys.version_info[:3])
        self._env_vars = None
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
class SameEnvironment(_SameEnvironmentMixin, Environment):
    # An Environment backed by the interpreter Jedi itself is running in.
    pass
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
class InterpreterEnvironment(_SameEnvironmentMixin, _BaseEnvironment):
    # Runs inference in the current process instead of spawning a subprocess.
    def get_inference_state_subprocess(
        self,
        inference_state: 'InferenceState',
    ) -> InferenceStateSameProcess:
        return InferenceStateSameProcess(inference_state)

    def get_sys_path(self):
        # The live sys.path of this process (including runtime modifications).
        return sys.path
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
def _get_virtual_env_from_var(env_var='VIRTUAL_ENV'):
    """Get virtualenv environment from VIRTUAL_ENV environment variable.

    It uses `safe=False` with ``create_environment``, because the environment
    variable is considered to be safe / controlled by the user solely.

    Returns ``None`` when the variable is unset or the environment is
    invalid.
    """
    var = os.environ.get(env_var)
    if var:
        # Under macOS in some cases - notably when using Pipenv - the
        # sys.prefix of the virtualenv is /path/to/env/bin/.. instead of
        # /path/to/env so we need to fully resolve the paths in order to
        # compare them.
        if os.path.realpath(var) == os.path.realpath(sys.prefix):
            # The active env is the one we are already running in.
            return _try_get_same_env()

        try:
            return create_environment(var, safe=False)
        except InvalidPythonEnvironment:
            # Invalid environment: fall through and return None implicitly.
            pass
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
def _calculate_sha256_for_file(path):
|
| 177 |
+
sha256 = hashlib.sha256()
|
| 178 |
+
with open(path, 'rb') as f:
|
| 179 |
+
for block in iter(lambda: f.read(filecmp.BUFSIZE), b''):
|
| 180 |
+
sha256.update(block)
|
| 181 |
+
return sha256.hexdigest()
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
def get_default_environment():
    """
    Tries to return an active Virtualenv or conda environment.
    If there is no VIRTUAL_ENV variable and no CONDA_PREFIX variable set,
    it will return the latest Python version installed on the system. This
    makes it possible to use as many new Python features as possible when using
    autocompletion and other functionality.

    :returns: :class:`.Environment`
    """
    virtual_env = _get_virtual_env_from_var()
    if virtual_env is not None:
        return virtual_env

    conda_env = _get_virtual_env_from_var(_CONDA_VAR)
    if conda_env is not None:
        return conda_env

    # No active virtualenv/conda: fall back to the running interpreter.
    return _try_get_same_env()
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
def _try_get_same_env():
    """
    Return an environment for the interpreter we are running in.

    If ``sys.executable`` does not look like a Python binary (e.g. when
    Python is embedded), try to locate a real Python executable inside
    ``sys.exec_prefix``; fall back to an in-process
    :class:`InterpreterEnvironment` if none is found.
    """
    env = SameEnvironment()
    if not os.path.basename(env.executable).lower().startswith('python'):
        # This tries to counter issues with embedding. In some cases (e.g.
        # VIM's Python Mac/Windows, sys.executable is /foo/bar/vim. This
        # happens, because for Mac a function called `_NSGetExecutablePath` is
        # used and for Windows `GetModuleFileNameW`. These are both platform
        # specific functions. For all other systems sys.executable should be
        # alright. However here we try to generalize:
        #
        # 1. Check if the executable looks like python (heuristic)
        # 2. In case it's not try to find the executable
        # 3. In case we don't find it use an interpreter environment.
        #
        # The last option will always work, but leads to potential crashes of
        # Jedi - which is ok, because it happens very rarely and even less,
        # because the code below should work for most cases.
        if os.name == 'nt':
            # The first case would be a virtualenv and the second a normal
            # Python installation.
            checks = (r'Scripts\python.exe', 'python.exe')
        else:
            # For unix it looks like Python is always in a bin folder.
            checks = (
                # Bug fix: this previously used `sys.version[1]`, which
                # indexes the version *string* (yielding '.'), producing e.g.
                # 'bin/python3..' — a path that can never exist. The minor
                # version lives in sys.version_info.
                'bin/python%s.%s' % (sys.version_info[0], sys.version_info[1]),
                'bin/python%s' % (sys.version_info[0]),
                'bin/python',
            )
        for check in checks:
            guess = os.path.join(sys.exec_prefix, check)
            if os.path.isfile(guess):
                # Bingo - We think we have our Python.
                return Environment(guess)
        # It looks like there is no reasonable Python to be found.
        return InterpreterEnvironment()
    # If no virtualenv is found, use the environment we're already
    # using.
    return env
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
def get_cached_default_environment():
    # Return the (time-)cached default environment, invalidating the cache
    # when VIRTUAL_ENV/CONDA_PREFIX no longer matches the cached path (the
    # user may have activated a different environment since the last call).
    var = os.environ.get('VIRTUAL_ENV') or os.environ.get(_CONDA_VAR)
    environment = _get_cached_default_environment()

    # Under macOS in some cases - notably when using Pipenv - the
    # sys.prefix of the virtualenv is /path/to/env/bin/.. instead of
    # /path/to/env so we need to fully resolve the paths in order to
    # compare them.
    if var and os.path.realpath(var) != os.path.realpath(environment.path):
        _get_cached_default_environment.clear_cache()
        return _get_cached_default_environment()
    return environment
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
@time_cache(seconds=10 * 60)  # 10 Minutes
def _get_cached_default_environment():
    # Time-cached wrapper around get_default_environment with a safe
    # fallback for broken sys.executable.
    try:
        return get_default_environment()
    except InvalidPythonEnvironment:
        # It's possible that `sys.executable` is wrong. Typically happens
        # when Jedi is used in an executable that embeds Python. For further
        # information, have a look at:
        # https://github.com/davidhalter/jedi/issues/1531
        return InterpreterEnvironment()
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
def find_virtualenvs(paths=None, *, safe=True, use_environment_vars=True):
    """
    :param paths: A list of paths in your file system to be scanned for
        Virtualenvs. It will search in these paths and potentially execute the
        Python binaries.
    :param safe: Default True. In case this is False, it will allow this
        function to execute potential `python` environments. An attacker might
        be able to drop an executable in a path this function is searching by
        default. If the executable has not been installed by root, it will not
        be executed.
    :param use_environment_vars: Default True. If True, the VIRTUAL_ENV
        variable will be checked if it contains a valid VirtualEnv.
        CONDA_PREFIX will be checked to see if it contains a valid conda
        environment.

    :yields: :class:`.Environment`
    """
    if paths is None:
        paths = []

    # Paths already yielded; each environment is reported at most once.
    _used_paths = set()

    if use_environment_vars:
        # Using this variable should be safe, because attackers might be
        # able to drop files (via git) but not environment variables.
        virtual_env = _get_virtual_env_from_var()
        if virtual_env is not None:
            yield virtual_env
            _used_paths.add(virtual_env.path)

        conda_env = _get_virtual_env_from_var(_CONDA_VAR)
        if conda_env is not None:
            yield conda_env
            _used_paths.add(conda_env.path)

    for directory in paths:
        if not os.path.isdir(directory):
            continue

        directory = os.path.abspath(directory)
        for path in os.listdir(directory):
            path = os.path.join(directory, path)
            if path in _used_paths:
                # A path shouldn't be inferred twice.
                continue
            _used_paths.add(path)

            try:
                executable = _get_executable_path(path, safe=safe)
                yield Environment(executable)
            except InvalidPythonEnvironment:
                # Not a usable environment; skip it silently.
                pass
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
def find_system_environments(*, env_vars=None):
|
| 326 |
+
"""
|
| 327 |
+
Ignores virtualenvs and returns the Python versions that were installed on
|
| 328 |
+
your system. This might return nothing, if you're running Python e.g. from
|
| 329 |
+
a portable version.
|
| 330 |
+
|
| 331 |
+
The environments are sorted from latest to oldest Python version.
|
| 332 |
+
|
| 333 |
+
:yields: :class:`.Environment`
|
| 334 |
+
"""
|
| 335 |
+
for version_string in _SUPPORTED_PYTHONS:
|
| 336 |
+
try:
|
| 337 |
+
yield get_system_environment(version_string, env_vars=env_vars)
|
| 338 |
+
except InvalidPythonEnvironment:
|
| 339 |
+
pass
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
# TODO: this function should probably return a list of environments since
|
| 343 |
+
# multiple Python installations can be found on a system for the same version.
|
| 344 |
+
def get_system_environment(version, *, env_vars=None):
|
| 345 |
+
"""
|
| 346 |
+
Return the first Python environment found for a string of the form 'X.Y'
|
| 347 |
+
where X and Y are the major and minor versions of Python.
|
| 348 |
+
|
| 349 |
+
:raises: :exc:`.InvalidPythonEnvironment`
|
| 350 |
+
:returns: :class:`.Environment`
|
| 351 |
+
"""
|
| 352 |
+
exe = which('python' + version)
|
| 353 |
+
if exe:
|
| 354 |
+
if exe == sys.executable:
|
| 355 |
+
return SameEnvironment()
|
| 356 |
+
return Environment(exe)
|
| 357 |
+
|
| 358 |
+
if os.name == 'nt':
|
| 359 |
+
for exe in _get_executables_from_windows_registry(version):
|
| 360 |
+
try:
|
| 361 |
+
return Environment(exe, env_vars=env_vars)
|
| 362 |
+
except InvalidPythonEnvironment:
|
| 363 |
+
pass
|
| 364 |
+
raise InvalidPythonEnvironment("Cannot find executable python%s." % version)
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
def create_environment(path, *, safe=True, env_vars=None):
|
| 368 |
+
"""
|
| 369 |
+
Make it possible to manually create an Environment object by specifying a
|
| 370 |
+
Virtualenv path or an executable path and optional environment variables.
|
| 371 |
+
|
| 372 |
+
:raises: :exc:`.InvalidPythonEnvironment`
|
| 373 |
+
:returns: :class:`.Environment`
|
| 374 |
+
"""
|
| 375 |
+
if os.path.isfile(path):
|
| 376 |
+
_assert_safe(path, safe)
|
| 377 |
+
return Environment(path, env_vars=env_vars)
|
| 378 |
+
return Environment(_get_executable_path(path, safe=safe), env_vars=env_vars)
|
| 379 |
+
|
| 380 |
+
|
| 381 |
+
def _get_executable_path(path, safe=True):
|
| 382 |
+
"""
|
| 383 |
+
Returns None if it's not actually a virtual env.
|
| 384 |
+
"""
|
| 385 |
+
|
| 386 |
+
if os.name == 'nt':
|
| 387 |
+
pythons = [os.path.join(path, 'Scripts', 'python.exe'), os.path.join(path, 'python.exe')]
|
| 388 |
+
else:
|
| 389 |
+
pythons = [os.path.join(path, 'bin', 'python')]
|
| 390 |
+
for python in pythons:
|
| 391 |
+
if os.path.exists(python):
|
| 392 |
+
break
|
| 393 |
+
else:
|
| 394 |
+
raise InvalidPythonEnvironment("%s seems to be missing." % python)
|
| 395 |
+
|
| 396 |
+
_assert_safe(python, safe)
|
| 397 |
+
return python
|
| 398 |
+
|
| 399 |
+
|
| 400 |
+
def _get_executables_from_windows_registry(version):
|
| 401 |
+
import winreg
|
| 402 |
+
|
| 403 |
+
# TODO: support Python Anaconda.
|
| 404 |
+
sub_keys = [
|
| 405 |
+
r'SOFTWARE\Python\PythonCore\{version}\InstallPath',
|
| 406 |
+
r'SOFTWARE\Wow6432Node\Python\PythonCore\{version}\InstallPath',
|
| 407 |
+
r'SOFTWARE\Python\PythonCore\{version}-32\InstallPath',
|
| 408 |
+
r'SOFTWARE\Wow6432Node\Python\PythonCore\{version}-32\InstallPath'
|
| 409 |
+
]
|
| 410 |
+
for root_key in [winreg.HKEY_CURRENT_USER, winreg.HKEY_LOCAL_MACHINE]:
|
| 411 |
+
for sub_key in sub_keys:
|
| 412 |
+
sub_key = sub_key.format(version=version)
|
| 413 |
+
try:
|
| 414 |
+
with winreg.OpenKey(root_key, sub_key) as key:
|
| 415 |
+
prefix = winreg.QueryValueEx(key, '')[0]
|
| 416 |
+
exe = os.path.join(prefix, 'python.exe')
|
| 417 |
+
if os.path.isfile(exe):
|
| 418 |
+
yield exe
|
| 419 |
+
except WindowsError:
|
| 420 |
+
pass
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
def _assert_safe(executable_path, safe):
|
| 424 |
+
if safe and not _is_safe(executable_path):
|
| 425 |
+
raise InvalidPythonEnvironment(
|
| 426 |
+
"The python binary is potentially unsafe.")
|
| 427 |
+
|
| 428 |
+
|
| 429 |
+
def _is_safe(executable_path):
|
| 430 |
+
# Resolve sym links. A venv typically is a symlink to a known Python
|
| 431 |
+
# binary. Only virtualenvs copy symlinks around.
|
| 432 |
+
real_path = os.path.realpath(executable_path)
|
| 433 |
+
|
| 434 |
+
if _is_unix_safe_simple(real_path):
|
| 435 |
+
return True
|
| 436 |
+
|
| 437 |
+
# Just check the list of known Python versions. If it's not in there,
|
| 438 |
+
# it's likely an attacker or some Python that was not properly
|
| 439 |
+
# installed in the system.
|
| 440 |
+
for environment in find_system_environments():
|
| 441 |
+
if environment.executable == real_path:
|
| 442 |
+
return True
|
| 443 |
+
|
| 444 |
+
# If the versions don't match, just compare the binary files. If we
|
| 445 |
+
# don't do that, only venvs will be working and not virtualenvs.
|
| 446 |
+
# venvs are symlinks while virtualenvs are actual copies of the
|
| 447 |
+
# Python files.
|
| 448 |
+
# This still means that if the system Python is updated and the
|
| 449 |
+
# virtualenv's Python is not (which is probably never going to get
|
| 450 |
+
# upgraded), it will not work with Jedi. IMO that's fine, because
|
| 451 |
+
# people should just be using venv. ~ dave
|
| 452 |
+
if environment._sha256 == _calculate_sha256_for_file(real_path):
|
| 453 |
+
return True
|
| 454 |
+
return False
|
| 455 |
+
|
| 456 |
+
|
| 457 |
+
def _is_unix_safe_simple(real_path):
|
| 458 |
+
if _is_unix_admin():
|
| 459 |
+
# In case we are root, just be conservative and
|
| 460 |
+
# only execute known paths.
|
| 461 |
+
return any(real_path.startswith(p) for p in _SAFE_PATHS)
|
| 462 |
+
|
| 463 |
+
uid = os.stat(real_path).st_uid
|
| 464 |
+
# The interpreter needs to be owned by root. This means that it wasn't
|
| 465 |
+
# written by a user and therefore attacking Jedi is not as simple.
|
| 466 |
+
# The attack could look like the following:
|
| 467 |
+
# 1. A user clones a repository.
|
| 468 |
+
# 2. The repository has an innocent looking folder called foobar. jedi
|
| 469 |
+
# searches for the folder and executes foobar/bin/python --version if
|
| 470 |
+
# there's also a foobar/bin/activate.
|
| 471 |
+
# 3. The attacker has gained code execution, since he controls
|
| 472 |
+
# foobar/bin/python.
|
| 473 |
+
return uid == 0
|
| 474 |
+
|
| 475 |
+
|
| 476 |
+
def _is_unix_admin():
|
| 477 |
+
try:
|
| 478 |
+
return os.getuid() == 0
|
| 479 |
+
except AttributeError:
|
| 480 |
+
return False # Windows
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/exceptions.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
class _JediError(Exception):
|
| 2 |
+
pass
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
class InternalError(_JediError):
|
| 6 |
+
"""
|
| 7 |
+
This error might happen a subprocess is crashing. The reason for this is
|
| 8 |
+
usually broken C code in third party libraries. This is not a very common
|
| 9 |
+
thing and it is safe to use Jedi again. However using the same calls might
|
| 10 |
+
result in the same error again.
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class WrongVersion(_JediError):
|
| 15 |
+
"""
|
| 16 |
+
This error is reserved for the future, shouldn't really be happening at the
|
| 17 |
+
moment.
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class RefactoringError(_JediError):
|
| 22 |
+
"""
|
| 23 |
+
Refactorings can fail for various reasons. So if you work with refactorings
|
| 24 |
+
like :meth:`.Script.rename`, :meth:`.Script.inline`,
|
| 25 |
+
:meth:`.Script.extract_variable` and :meth:`.Script.extract_function`, make
|
| 26 |
+
sure to catch these. The descriptions in the errors are usually valuable
|
| 27 |
+
for end users.
|
| 28 |
+
|
| 29 |
+
A typical ``RefactoringError`` would tell the user that inlining is not
|
| 30 |
+
possible if no name is under the cursor.
|
| 31 |
+
"""
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/helpers.py
ADDED
|
@@ -0,0 +1,522 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Helpers for the API
|
| 3 |
+
"""
|
| 4 |
+
import re
|
| 5 |
+
from collections import namedtuple
|
| 6 |
+
from textwrap import dedent
|
| 7 |
+
from itertools import chain
|
| 8 |
+
from functools import wraps
|
| 9 |
+
from inspect import Parameter
|
| 10 |
+
|
| 11 |
+
from parso.python.parser import Parser
|
| 12 |
+
from parso.python import tree
|
| 13 |
+
|
| 14 |
+
from jedi.inference.base_value import NO_VALUES
|
| 15 |
+
from jedi.inference.syntax_tree import infer_atom
|
| 16 |
+
from jedi.inference.helpers import infer_call_of_leaf
|
| 17 |
+
from jedi.inference.compiled import get_string_value_set
|
| 18 |
+
from jedi.cache import signature_time_cache, memoize_method
|
| 19 |
+
from jedi.parser_utils import get_parent_scope
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
CompletionParts = namedtuple('CompletionParts', ['path', 'has_dot', 'name'])
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def _start_match(string, like_name):
|
| 26 |
+
return string.startswith(like_name)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def _fuzzy_match(string, like_name):
|
| 30 |
+
if len(like_name) <= 1:
|
| 31 |
+
return like_name in string
|
| 32 |
+
pos = string.find(like_name[0])
|
| 33 |
+
if pos >= 0:
|
| 34 |
+
return _fuzzy_match(string[pos + 1:], like_name[1:])
|
| 35 |
+
return False
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def match(string, like_name, fuzzy=False):
|
| 39 |
+
if fuzzy:
|
| 40 |
+
return _fuzzy_match(string, like_name)
|
| 41 |
+
else:
|
| 42 |
+
return _start_match(string, like_name)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def sorted_definitions(defs):
|
| 46 |
+
# Note: `or ''` below is required because `module_path` could be
|
| 47 |
+
return sorted(defs, key=lambda x: (str(x.module_path or ''),
|
| 48 |
+
x.line or 0,
|
| 49 |
+
x.column or 0,
|
| 50 |
+
x.name))
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def get_on_completion_name(module_node, lines, position):
|
| 54 |
+
leaf = module_node.get_leaf_for_position(position)
|
| 55 |
+
if leaf is None or leaf.type in ('string', 'error_leaf'):
|
| 56 |
+
# Completions inside strings are a bit special, we need to parse the
|
| 57 |
+
# string. The same is true for comments and error_leafs.
|
| 58 |
+
line = lines[position[0] - 1]
|
| 59 |
+
# The first step of completions is to get the name
|
| 60 |
+
return re.search(r'(?!\d)\w+$|$', line[:position[1]]).group(0)
|
| 61 |
+
elif leaf.type not in ('name', 'keyword'):
|
| 62 |
+
return ''
|
| 63 |
+
|
| 64 |
+
return leaf.value[:position[1] - leaf.start_pos[1]]
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _get_code(code_lines, start_pos, end_pos):
|
| 68 |
+
# Get relevant lines.
|
| 69 |
+
lines = code_lines[start_pos[0] - 1:end_pos[0]]
|
| 70 |
+
# Remove the parts at the end of the line.
|
| 71 |
+
lines[-1] = lines[-1][:end_pos[1]]
|
| 72 |
+
# Remove first line indentation.
|
| 73 |
+
lines[0] = lines[0][start_pos[1]:]
|
| 74 |
+
return ''.join(lines)
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class OnErrorLeaf(Exception):
|
| 78 |
+
@property
|
| 79 |
+
def error_leaf(self):
|
| 80 |
+
return self.args[0]
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def _get_code_for_stack(code_lines, leaf, position):
|
| 84 |
+
# It might happen that we're on whitespace or on a comment. This means
|
| 85 |
+
# that we would not get the right leaf.
|
| 86 |
+
if leaf.start_pos >= position:
|
| 87 |
+
# If we're not on a comment simply get the previous leaf and proceed.
|
| 88 |
+
leaf = leaf.get_previous_leaf()
|
| 89 |
+
if leaf is None:
|
| 90 |
+
return '' # At the beginning of the file.
|
| 91 |
+
|
| 92 |
+
is_after_newline = leaf.type == 'newline'
|
| 93 |
+
while leaf.type == 'newline':
|
| 94 |
+
leaf = leaf.get_previous_leaf()
|
| 95 |
+
if leaf is None:
|
| 96 |
+
return ''
|
| 97 |
+
|
| 98 |
+
if leaf.type == 'error_leaf' or leaf.type == 'string':
|
| 99 |
+
if leaf.start_pos[0] < position[0]:
|
| 100 |
+
# On a different line, we just begin anew.
|
| 101 |
+
return ''
|
| 102 |
+
|
| 103 |
+
# Error leafs cannot be parsed, completion in strings is also
|
| 104 |
+
# impossible.
|
| 105 |
+
raise OnErrorLeaf(leaf)
|
| 106 |
+
else:
|
| 107 |
+
user_stmt = leaf
|
| 108 |
+
while True:
|
| 109 |
+
if user_stmt.parent.type in ('file_input', 'suite', 'simple_stmt'):
|
| 110 |
+
break
|
| 111 |
+
user_stmt = user_stmt.parent
|
| 112 |
+
|
| 113 |
+
if is_after_newline:
|
| 114 |
+
if user_stmt.start_pos[1] > position[1]:
|
| 115 |
+
# This means that it's actually a dedent and that means that we
|
| 116 |
+
# start without value (part of a suite).
|
| 117 |
+
return ''
|
| 118 |
+
|
| 119 |
+
# This is basically getting the relevant lines.
|
| 120 |
+
return _get_code(code_lines, user_stmt.get_start_pos_of_prefix(), position)
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
def get_stack_at_position(grammar, code_lines, leaf, pos):
|
| 124 |
+
"""
|
| 125 |
+
Returns the possible node names (e.g. import_from, xor_test or yield_stmt).
|
| 126 |
+
"""
|
| 127 |
+
class EndMarkerReached(Exception):
|
| 128 |
+
pass
|
| 129 |
+
|
| 130 |
+
def tokenize_without_endmarker(code):
|
| 131 |
+
# TODO This is for now not an official parso API that exists purely
|
| 132 |
+
# for Jedi.
|
| 133 |
+
tokens = grammar._tokenize(code)
|
| 134 |
+
for token in tokens:
|
| 135 |
+
if token.string == safeword:
|
| 136 |
+
raise EndMarkerReached()
|
| 137 |
+
elif token.prefix.endswith(safeword):
|
| 138 |
+
# This happens with comments.
|
| 139 |
+
raise EndMarkerReached()
|
| 140 |
+
elif token.string.endswith(safeword):
|
| 141 |
+
yield token # Probably an f-string literal that was not finished.
|
| 142 |
+
raise EndMarkerReached()
|
| 143 |
+
else:
|
| 144 |
+
yield token
|
| 145 |
+
|
| 146 |
+
# The code might be indedented, just remove it.
|
| 147 |
+
code = dedent(_get_code_for_stack(code_lines, leaf, pos))
|
| 148 |
+
# We use a word to tell Jedi when we have reached the start of the
|
| 149 |
+
# completion.
|
| 150 |
+
# Use Z as a prefix because it's not part of a number suffix.
|
| 151 |
+
safeword = 'ZZZ_USER_WANTS_TO_COMPLETE_HERE_WITH_JEDI'
|
| 152 |
+
code = code + ' ' + safeword
|
| 153 |
+
|
| 154 |
+
p = Parser(grammar._pgen_grammar, error_recovery=True)
|
| 155 |
+
try:
|
| 156 |
+
p.parse(tokens=tokenize_without_endmarker(code))
|
| 157 |
+
except EndMarkerReached:
|
| 158 |
+
return p.stack
|
| 159 |
+
raise SystemError(
|
| 160 |
+
"This really shouldn't happen. There's a bug in Jedi:\n%s"
|
| 161 |
+
% list(tokenize_without_endmarker(code))
|
| 162 |
+
)
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
def infer(inference_state, context, leaf):
|
| 166 |
+
if leaf.type == 'name':
|
| 167 |
+
return inference_state.infer(context, leaf)
|
| 168 |
+
|
| 169 |
+
parent = leaf.parent
|
| 170 |
+
definitions = NO_VALUES
|
| 171 |
+
if parent.type == 'atom':
|
| 172 |
+
# e.g. `(a + b)`
|
| 173 |
+
definitions = context.infer_node(leaf.parent)
|
| 174 |
+
elif parent.type == 'trailer':
|
| 175 |
+
# e.g. `a()`
|
| 176 |
+
definitions = infer_call_of_leaf(context, leaf)
|
| 177 |
+
elif isinstance(leaf, tree.Literal):
|
| 178 |
+
# e.g. `"foo"` or `1.0`
|
| 179 |
+
return infer_atom(context, leaf)
|
| 180 |
+
elif leaf.type in ('fstring_string', 'fstring_start', 'fstring_end'):
|
| 181 |
+
return get_string_value_set(inference_state)
|
| 182 |
+
return definitions
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def filter_follow_imports(names, follow_builtin_imports=False):
|
| 186 |
+
for name in names:
|
| 187 |
+
if name.is_import():
|
| 188 |
+
new_names = list(filter_follow_imports(
|
| 189 |
+
name.goto(),
|
| 190 |
+
follow_builtin_imports=follow_builtin_imports,
|
| 191 |
+
))
|
| 192 |
+
found_builtin = False
|
| 193 |
+
if follow_builtin_imports:
|
| 194 |
+
for new_name in new_names:
|
| 195 |
+
if new_name.start_pos is None:
|
| 196 |
+
found_builtin = True
|
| 197 |
+
|
| 198 |
+
if found_builtin:
|
| 199 |
+
yield name
|
| 200 |
+
else:
|
| 201 |
+
yield from new_names
|
| 202 |
+
else:
|
| 203 |
+
yield name
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
class CallDetails:
|
| 207 |
+
def __init__(self, bracket_leaf, children, position):
|
| 208 |
+
self.bracket_leaf = bracket_leaf
|
| 209 |
+
self._children = children
|
| 210 |
+
self._position = position
|
| 211 |
+
|
| 212 |
+
@property
|
| 213 |
+
def index(self):
|
| 214 |
+
return _get_index_and_key(self._children, self._position)[0]
|
| 215 |
+
|
| 216 |
+
@property
|
| 217 |
+
def keyword_name_str(self):
|
| 218 |
+
return _get_index_and_key(self._children, self._position)[1]
|
| 219 |
+
|
| 220 |
+
@memoize_method
|
| 221 |
+
def _list_arguments(self):
|
| 222 |
+
return list(_iter_arguments(self._children, self._position))
|
| 223 |
+
|
| 224 |
+
def calculate_index(self, param_names):
|
| 225 |
+
positional_count = 0
|
| 226 |
+
used_names = set()
|
| 227 |
+
star_count = -1
|
| 228 |
+
args = self._list_arguments()
|
| 229 |
+
if not args:
|
| 230 |
+
if param_names:
|
| 231 |
+
return 0
|
| 232 |
+
else:
|
| 233 |
+
return None
|
| 234 |
+
|
| 235 |
+
is_kwarg = False
|
| 236 |
+
for i, (star_count, key_start, had_equal) in enumerate(args):
|
| 237 |
+
is_kwarg |= had_equal | (star_count == 2)
|
| 238 |
+
if star_count:
|
| 239 |
+
pass # For now do nothing, we don't know what's in there here.
|
| 240 |
+
else:
|
| 241 |
+
if i + 1 != len(args): # Not last
|
| 242 |
+
if had_equal:
|
| 243 |
+
used_names.add(key_start)
|
| 244 |
+
else:
|
| 245 |
+
positional_count += 1
|
| 246 |
+
|
| 247 |
+
for i, param_name in enumerate(param_names):
|
| 248 |
+
kind = param_name.get_kind()
|
| 249 |
+
|
| 250 |
+
if not is_kwarg:
|
| 251 |
+
if kind == Parameter.VAR_POSITIONAL:
|
| 252 |
+
return i
|
| 253 |
+
if kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.POSITIONAL_ONLY):
|
| 254 |
+
if i == positional_count:
|
| 255 |
+
return i
|
| 256 |
+
|
| 257 |
+
if key_start is not None and not star_count == 1 or star_count == 2:
|
| 258 |
+
if param_name.string_name not in used_names \
|
| 259 |
+
and (kind == Parameter.KEYWORD_ONLY
|
| 260 |
+
or kind == Parameter.POSITIONAL_OR_KEYWORD
|
| 261 |
+
and positional_count <= i):
|
| 262 |
+
if star_count:
|
| 263 |
+
return i
|
| 264 |
+
if had_equal:
|
| 265 |
+
if param_name.string_name == key_start:
|
| 266 |
+
return i
|
| 267 |
+
else:
|
| 268 |
+
if param_name.string_name.startswith(key_start):
|
| 269 |
+
return i
|
| 270 |
+
|
| 271 |
+
if kind == Parameter.VAR_KEYWORD:
|
| 272 |
+
return i
|
| 273 |
+
return None
|
| 274 |
+
|
| 275 |
+
def iter_used_keyword_arguments(self):
|
| 276 |
+
for star_count, key_start, had_equal in list(self._list_arguments()):
|
| 277 |
+
if had_equal and key_start:
|
| 278 |
+
yield key_start
|
| 279 |
+
|
| 280 |
+
def count_positional_arguments(self):
|
| 281 |
+
count = 0
|
| 282 |
+
for star_count, key_start, had_equal in self._list_arguments()[:-1]:
|
| 283 |
+
if star_count or key_start:
|
| 284 |
+
break
|
| 285 |
+
count += 1
|
| 286 |
+
return count
|
| 287 |
+
|
| 288 |
+
|
| 289 |
+
def _iter_arguments(nodes, position):
|
| 290 |
+
def remove_after_pos(name):
|
| 291 |
+
if name.type != 'name':
|
| 292 |
+
return None
|
| 293 |
+
return name.value[:position[1] - name.start_pos[1]]
|
| 294 |
+
|
| 295 |
+
# Returns Generator[Tuple[star_count, Optional[key_start: str], had_equal]]
|
| 296 |
+
nodes_before = [c for c in nodes if c.start_pos < position]
|
| 297 |
+
if nodes_before[-1].type == 'arglist':
|
| 298 |
+
yield from _iter_arguments(nodes_before[-1].children, position)
|
| 299 |
+
return
|
| 300 |
+
|
| 301 |
+
previous_node_yielded = False
|
| 302 |
+
stars_seen = 0
|
| 303 |
+
for i, node in enumerate(nodes_before):
|
| 304 |
+
if node.type == 'argument':
|
| 305 |
+
previous_node_yielded = True
|
| 306 |
+
first = node.children[0]
|
| 307 |
+
second = node.children[1]
|
| 308 |
+
if second == '=':
|
| 309 |
+
if second.start_pos < position and first.type == 'name':
|
| 310 |
+
yield 0, first.value, True
|
| 311 |
+
else:
|
| 312 |
+
yield 0, remove_after_pos(first), False
|
| 313 |
+
elif first in ('*', '**'):
|
| 314 |
+
yield len(first.value), remove_after_pos(second), False
|
| 315 |
+
else:
|
| 316 |
+
# Must be a Comprehension
|
| 317 |
+
first_leaf = node.get_first_leaf()
|
| 318 |
+
if first_leaf.type == 'name' and first_leaf.start_pos >= position:
|
| 319 |
+
yield 0, remove_after_pos(first_leaf), False
|
| 320 |
+
else:
|
| 321 |
+
yield 0, None, False
|
| 322 |
+
stars_seen = 0
|
| 323 |
+
elif node.type == 'testlist_star_expr':
|
| 324 |
+
for n in node.children[::2]:
|
| 325 |
+
if n.type == 'star_expr':
|
| 326 |
+
stars_seen = 1
|
| 327 |
+
n = n.children[1]
|
| 328 |
+
yield stars_seen, remove_after_pos(n), False
|
| 329 |
+
stars_seen = 0
|
| 330 |
+
# The count of children is even if there's a comma at the end.
|
| 331 |
+
previous_node_yielded = bool(len(node.children) % 2)
|
| 332 |
+
elif isinstance(node, tree.PythonLeaf) and node.value == ',':
|
| 333 |
+
if not previous_node_yielded:
|
| 334 |
+
yield stars_seen, '', False
|
| 335 |
+
stars_seen = 0
|
| 336 |
+
previous_node_yielded = False
|
| 337 |
+
elif isinstance(node, tree.PythonLeaf) and node.value in ('*', '**'):
|
| 338 |
+
stars_seen = len(node.value)
|
| 339 |
+
elif node == '=' and nodes_before[-1]:
|
| 340 |
+
previous_node_yielded = True
|
| 341 |
+
before = nodes_before[i - 1]
|
| 342 |
+
if before.type == 'name':
|
| 343 |
+
yield 0, before.value, True
|
| 344 |
+
else:
|
| 345 |
+
yield 0, None, False
|
| 346 |
+
# Just ignore the star that is probably a syntax error.
|
| 347 |
+
stars_seen = 0
|
| 348 |
+
|
| 349 |
+
if not previous_node_yielded:
|
| 350 |
+
if nodes_before[-1].type == 'name':
|
| 351 |
+
yield stars_seen, remove_after_pos(nodes_before[-1]), False
|
| 352 |
+
else:
|
| 353 |
+
yield stars_seen, '', False
|
| 354 |
+
|
| 355 |
+
|
| 356 |
+
def _get_index_and_key(nodes, position):
|
| 357 |
+
"""
|
| 358 |
+
Returns the amount of commas and the keyword argument string.
|
| 359 |
+
"""
|
| 360 |
+
nodes_before = [c for c in nodes if c.start_pos < position]
|
| 361 |
+
if nodes_before[-1].type == 'arglist':
|
| 362 |
+
return _get_index_and_key(nodes_before[-1].children, position)
|
| 363 |
+
|
| 364 |
+
key_str = None
|
| 365 |
+
|
| 366 |
+
last = nodes_before[-1]
|
| 367 |
+
if last.type == 'argument' and last.children[1] == '=' \
|
| 368 |
+
and last.children[1].end_pos <= position:
|
| 369 |
+
# Checked if the argument
|
| 370 |
+
key_str = last.children[0].value
|
| 371 |
+
elif last == '=':
|
| 372 |
+
key_str = nodes_before[-2].value
|
| 373 |
+
|
| 374 |
+
return nodes_before.count(','), key_str
|
| 375 |
+
|
| 376 |
+
|
| 377 |
+
def _get_signature_details_from_error_node(node, additional_children, position):
|
| 378 |
+
for index, element in reversed(list(enumerate(node.children))):
|
| 379 |
+
# `index > 0` means that it's a trailer and not an atom.
|
| 380 |
+
if element == '(' and element.end_pos <= position and index > 0:
|
| 381 |
+
# It's an error node, we don't want to match too much, just
|
| 382 |
+
# until the parentheses is enough.
|
| 383 |
+
children = node.children[index:]
|
| 384 |
+
name = element.get_previous_leaf()
|
| 385 |
+
if name is None:
|
| 386 |
+
continue
|
| 387 |
+
if name.type == 'name' or name.parent.type in ('trailer', 'atom'):
|
| 388 |
+
return CallDetails(element, children + additional_children, position)
|
| 389 |
+
|
| 390 |
+
|
| 391 |
+
def get_signature_details(module, position):
|
| 392 |
+
leaf = module.get_leaf_for_position(position, include_prefixes=True)
|
| 393 |
+
# It's easier to deal with the previous token than the next one in this
|
| 394 |
+
# case.
|
| 395 |
+
if leaf.start_pos >= position:
|
| 396 |
+
# Whitespace / comments after the leaf count towards the previous leaf.
|
| 397 |
+
leaf = leaf.get_previous_leaf()
|
| 398 |
+
if leaf is None:
|
| 399 |
+
return None
|
| 400 |
+
|
| 401 |
+
# Now that we know where we are in the syntax tree, we start to look at
|
| 402 |
+
# parents for possible function definitions.
|
| 403 |
+
node = leaf.parent
|
| 404 |
+
while node is not None:
|
| 405 |
+
if node.type in ('funcdef', 'classdef', 'decorated', 'async_stmt'):
|
| 406 |
+
# Don't show signatures if there's stuff before it that just
|
| 407 |
+
# makes it feel strange to have a signature.
|
| 408 |
+
return None
|
| 409 |
+
|
| 410 |
+
additional_children = []
|
| 411 |
+
for n in reversed(node.children):
|
| 412 |
+
if n.start_pos < position:
|
| 413 |
+
if n.type == 'error_node':
|
| 414 |
+
result = _get_signature_details_from_error_node(
|
| 415 |
+
n, additional_children, position
|
| 416 |
+
)
|
| 417 |
+
if result is not None:
|
| 418 |
+
return result
|
| 419 |
+
|
| 420 |
+
additional_children[0:0] = n.children
|
| 421 |
+
continue
|
| 422 |
+
additional_children.insert(0, n)
|
| 423 |
+
|
| 424 |
+
# Find a valid trailer
|
| 425 |
+
if node.type == 'trailer' and node.children[0] == '(' \
|
| 426 |
+
or node.type == 'decorator' and node.children[2] == '(':
|
| 427 |
+
# Additionally we have to check that an ending parenthesis isn't
|
| 428 |
+
# interpreted wrong. There are two cases:
|
| 429 |
+
# 1. Cursor before paren -> The current signature is good
|
| 430 |
+
# 2. Cursor after paren -> We need to skip the current signature
|
| 431 |
+
if not (leaf is node.children[-1] and position >= leaf.end_pos):
|
| 432 |
+
leaf = node.get_previous_leaf()
|
| 433 |
+
if leaf is None:
|
| 434 |
+
return None
|
| 435 |
+
return CallDetails(
|
| 436 |
+
node.children[0] if node.type == 'trailer' else node.children[2],
|
| 437 |
+
node.children,
|
| 438 |
+
position
|
| 439 |
+
)
|
| 440 |
+
|
| 441 |
+
node = node.parent
|
| 442 |
+
|
| 443 |
+
return None
|
| 444 |
+
|
| 445 |
+
|
| 446 |
+
@signature_time_cache("call_signatures_validity")
|
| 447 |
+
def cache_signatures(inference_state, context, bracket_leaf, code_lines, user_pos):
|
| 448 |
+
"""This function calculates the cache key."""
|
| 449 |
+
line_index = user_pos[0] - 1
|
| 450 |
+
|
| 451 |
+
before_cursor = code_lines[line_index][:user_pos[1]]
|
| 452 |
+
other_lines = code_lines[bracket_leaf.start_pos[0]:line_index]
|
| 453 |
+
whole = ''.join(other_lines + [before_cursor])
|
| 454 |
+
before_bracket = re.match(r'.*\(', whole, re.DOTALL)
|
| 455 |
+
|
| 456 |
+
module_path = context.get_root_context().py__file__()
|
| 457 |
+
if module_path is None:
|
| 458 |
+
yield None # Don't cache!
|
| 459 |
+
else:
|
| 460 |
+
yield (module_path, before_bracket, bracket_leaf.start_pos)
|
| 461 |
+
yield infer(
|
| 462 |
+
inference_state,
|
| 463 |
+
context,
|
| 464 |
+
bracket_leaf.get_previous_leaf(),
|
| 465 |
+
)
|
| 466 |
+
|
| 467 |
+
|
| 468 |
+
def validate_line_column(func):
    """
    Decorator that normalizes and validates the ``line``/``column`` arguments
    of a Script API method.

    ``line`` defaults to the last line of the code, ``column`` to the end of
    that line (excluding a trailing newline). A :class:`ValueError` is raised
    when either value falls outside the document.
    """
    @wraps(func)
    def wrapper(self, line=None, column=None, *args, **kwargs):
        code_lines = self._code_lines
        if line is None:
            # Default to the last line; a document always has at least one.
            line = max(len(code_lines), 1)
        if not 0 < line <= len(code_lines):
            raise ValueError('`line` parameter is not in a valid range.')

        text = code_lines[line - 1]
        # The trailing line separator is not a valid cursor position.
        if text.endswith('\r\n'):
            max_column = len(text) - 2
        elif text.endswith('\n'):
            max_column = len(text) - 1
        else:
            max_column = len(text)

        if column is None:
            column = max_column
        if not 0 <= column <= max_column:
            raise ValueError('`column` parameter (%d) is not in a valid range '
                             '(0-%d) for line %d (%r).' % (
                                 column, max_column, line, text))
        return func(self, line, column, *args, **kwargs)
    return wrapper
|
| 489 |
+
|
| 490 |
+
|
| 491 |
+
def get_module_names(module, all_scopes, definitions=True, references=False):
    """
    Iterate over the tree names used in *module*, filtered by kind.

    :param all_scopes: If False, only names whose enclosing scope is the
        module itself are returned.
    :param definitions: Include names that define something.
    :param references: Include names that merely reference something.
    :return: An iterator of parso tree names.
    """
    def wanted(name):
        # Keep definitions and/or references, depending on the flags.
        if name.is_definition():
            return definitions
        return references

    all_names = list(chain.from_iterable(module.get_used_names().values()))

    if not all_scopes:
        # We have to filter all the names that don't have the module as a
        # parent_scope. There's None as a parent, because nodes in the module
        # node have the parent module and not suite as all the others.
        # Therefore it's important to catch that case.

        def at_module_level(name):
            scope = get_parent_scope(name)
            # async functions have an extra wrapper. Strip it.
            if scope and scope.type == 'async_stmt':
                scope = scope.parent
            return scope in (module, None)

        all_names = [name for name in all_names if at_module_level(name)]

    return filter(wanted, all_names)
|
| 516 |
+
|
| 517 |
+
|
| 518 |
+
def split_search_string(name):
    """
    Split a search pattern like ``'class foo.bar.Baz'`` into an API type and
    its dotted name parts.

    :return: A tuple ``(type, parts)``; ``type`` is ``''`` when no type
        prefix was given, and ``'def'`` is normalized to ``'function'``.
    """
    api_type, _, dotted = name.rpartition(' ')
    if api_type == 'def':
        api_type = 'function'
    return api_type, dotted.split('.')
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/project.py
ADDED
|
@@ -0,0 +1,448 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Projects are a way to handle Python projects within Jedi. For simpler plugins
|
| 3 |
+
you might not want to deal with projects, but if you want to give the user more
|
| 4 |
+
flexibility to define sys paths and Python interpreters for a project,
|
| 5 |
+
:class:`.Project` is the perfect way to allow for that.
|
| 6 |
+
|
| 7 |
+
Projects can be saved to disk and loaded again, to allow project definitions to
|
| 8 |
+
be used across repositories.
|
| 9 |
+
"""
|
| 10 |
+
import json
|
| 11 |
+
from pathlib import Path
|
| 12 |
+
from itertools import chain
|
| 13 |
+
|
| 14 |
+
from jedi import debug
|
| 15 |
+
from jedi.api.environment import get_cached_default_environment, create_environment
|
| 16 |
+
from jedi.api.exceptions import WrongVersion
|
| 17 |
+
from jedi.api.completion import search_in_module
|
| 18 |
+
from jedi.api.helpers import split_search_string, get_module_names
|
| 19 |
+
from jedi.inference.imports import load_module_from_path, \
|
| 20 |
+
load_namespace_from_path, iter_module_names
|
| 21 |
+
from jedi.inference.sys_path import discover_buildout_paths
|
| 22 |
+
from jedi.inference.cache import inference_state_as_method_param_cache
|
| 23 |
+
from jedi.inference.references import recurse_find_python_folders_and_files, search_in_file_ios
|
| 24 |
+
from jedi.file_io import FolderIO
|
| 25 |
+
|
| 26 |
+
_CONFIG_FOLDER = '.jedi'
|
| 27 |
+
_CONTAINS_POTENTIAL_PROJECT = \
|
| 28 |
+
'setup.py', '.git', '.hg', 'requirements.txt', 'MANIFEST.in', 'pyproject.toml'
|
| 29 |
+
|
| 30 |
+
_SERIALIZER_VERSION = 1
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def _try_to_skip_duplicates(func):
|
| 34 |
+
def wrapper(*args, **kwargs):
|
| 35 |
+
found_tree_nodes = []
|
| 36 |
+
found_modules = []
|
| 37 |
+
for definition in func(*args, **kwargs):
|
| 38 |
+
tree_node = definition._name.tree_name
|
| 39 |
+
if tree_node is not None and tree_node in found_tree_nodes:
|
| 40 |
+
continue
|
| 41 |
+
if definition.type == 'module' and definition.module_path is not None:
|
| 42 |
+
if definition.module_path in found_modules:
|
| 43 |
+
continue
|
| 44 |
+
found_modules.append(definition.module_path)
|
| 45 |
+
yield definition
|
| 46 |
+
found_tree_nodes.append(tree_node)
|
| 47 |
+
return wrapper
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def _remove_duplicates_from_path(path):
|
| 51 |
+
used = set()
|
| 52 |
+
for p in path:
|
| 53 |
+
if p in used:
|
| 54 |
+
continue
|
| 55 |
+
used.add(p)
|
| 56 |
+
yield p
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class Project:
    """
    Projects are a simple way to manage Python folders and define how Jedi does
    import resolution. It is mostly used as a parameter to :class:`.Script`.
    Additionally there are functions to search a whole project.
    """
    # Created lazily by get_environment(); excluded from serialization.
    _environment = None

    @staticmethod
    def _get_config_folder_path(base_path):
        # The folder where the project configuration is stored (``.jedi``).
        return base_path.joinpath(_CONFIG_FOLDER)

    @staticmethod
    def _get_json_path(base_path):
        # The full path of the serialized project file.
        return Project._get_config_folder_path(base_path).joinpath('project.json')

    @classmethod
    def load(cls, path):
        """
        Loads a project from a specific path. You should not provide the path
        to ``.jedi/project.json``, but rather the path to the project folder.

        :param path: The path of the directory you want to use as a project.
        :raises WrongVersion: If the file was serialized with a newer version
            than this Jedi can handle.
        """
        if isinstance(path, str):
            path = Path(path)
        with open(cls._get_json_path(path)) as f:
            version, data = json.load(f)

        if version == 1:
            return cls(**data)
        else:
            raise WrongVersion(
                "The Jedi version of this project seems newer than what we can handle."
            )

    def save(self):
        """
        Saves the project configuration in the project in ``.jedi/project.json``.
        """
        data = dict(self.__dict__)
        # Runtime-only attributes must not be serialized.
        data.pop('_environment', None)
        data.pop('_django', None)  # TODO make django setting public?
        # Strip leading underscores so the keys match the keyword arguments
        # of __init__ (which is how load() reconstructs the project).
        data = {k.lstrip('_'): v for k, v in data.items()}
        data['path'] = str(data['path'])

        self._get_config_folder_path(self._path).mkdir(parents=True, exist_ok=True)
        with open(self._get_json_path(self._path), 'w') as f:
            return json.dump((_SERIALIZER_VERSION, data), f)

    def __init__(
        self,
        path,
        *,
        environment_path=None,
        load_unsafe_extensions=False,
        sys_path=None,
        added_sys_path=(),
        smart_sys_path=True,
    ) -> None:
        """
        :param path: The base path for this project.
        :param environment_path: The Python executable path, typically the path
            of a virtual environment.
        :param load_unsafe_extensions: Default False, Loads extensions that are not in the
            sys path and in the local directories. With this option enabled,
            this is potentially unsafe if you clone a git repository and
            analyze its code, because those compiled extensions will be
            imported and therefore have execution privileges.
        :param sys_path: list of str. You can override the sys path if you
            want. By default the ``sys.path.`` is generated by the
            environment (virtualenvs, etc).
        :param added_sys_path: list of str. Adds these paths at the end of the
            sys path.
        :param smart_sys_path: If this is enabled (default), adds paths from
            local directories. Otherwise you will have to rely on your packages
            being properly configured on the ``sys.path``.
        """

        if isinstance(path, str):
            path = Path(path).absolute()
        self._path = path

        self._environment_path = environment_path
        if sys_path is not None:
            # Remap potential pathlib.Path entries
            sys_path = list(map(str, sys_path))
        self._sys_path = sys_path
        self._smart_sys_path = smart_sys_path
        self._load_unsafe_extensions = load_unsafe_extensions
        self._django = False
        # Remap potential pathlib.Path entries
        self.added_sys_path = list(map(str, added_sys_path))
        """The sys path that is going to be added at the end of the sys path."""

    @property
    def path(self):
        """
        The base path for this project.
        """
        return self._path

    @property
    def sys_path(self):
        """
        The sys path provided to this project. This can be None and in that
        case will be auto generated.
        """
        return self._sys_path

    @property
    def smart_sys_path(self):
        """
        If the sys path is going to be calculated in a smart way, where
        additional paths are added.
        """
        return self._smart_sys_path

    @property
    def load_unsafe_extensions(self):
        """
        Whether the project loads unsafe extensions.
        """
        return self._load_unsafe_extensions

    @inference_state_as_method_param_cache()
    def _get_base_sys_path(self, inference_state):
        # The sys path has not been set explicitly.
        sys_path = list(inference_state.environment.get_sys_path())
        try:
            # '' means the current working directory; it must not leak into
            # project-level inference.
            sys_path.remove('')
        except ValueError:
            pass
        return sys_path

    @inference_state_as_method_param_cache()
    def _get_sys_path(self, inference_state, add_parent_paths=True, add_init_paths=False):
        """
        Keep this method private for all users of jedi. However internally this
        one is used like a public method.
        """
        suffixed = list(self.added_sys_path)
        prefixed = []

        if self._sys_path is None:
            sys_path = list(self._get_base_sys_path(inference_state))
        else:
            sys_path = list(self._sys_path)

        if self._smart_sys_path:
            prefixed.append(str(self._path))

            if inference_state.script_path is not None:
                suffixed += map(str, discover_buildout_paths(
                    inference_state,
                    inference_state.script_path
                ))

                if add_parent_paths:
                    # Collect directories in upward search by:
                    # 1. Skipping directories with __init__.py
                    # 2. Stopping immediately when above self._path
                    traversed = []
                    for parent_path in inference_state.script_path.parents:
                        if parent_path == self._path \
                                or self._path not in parent_path.parents:
                            break
                        if not add_init_paths \
                                and parent_path.joinpath("__init__.py").is_file():
                            continue
                        traversed.append(str(parent_path))

                    # AFAIK some libraries have imports like `foo.foo.bar`, which
                    # leads to the conclusion to by default prefer longer paths
                    # rather than shorter ones by default.
                    suffixed += reversed(traversed)

        if self._django:
            prefixed.append(str(self._path))

        path = prefixed + sys_path + suffixed
        return list(_remove_duplicates_from_path(path))

    def get_environment(self):
        # Created lazily on first access and cached afterwards.
        if self._environment is None:
            if self._environment_path is not None:
                self._environment = create_environment(self._environment_path, safe=False)
            else:
                self._environment = get_cached_default_environment()
        return self._environment

    def search(self, string, *, all_scopes=False):
        """
        Searches a name in the whole project. If the project is very big,
        at some point Jedi will stop searching. However it's also very much
        recommended to not exhaust the generator. Just display the first ten
        results to the user.

        There are currently three different search patterns:

        - ``foo`` to search for a definition foo in any file or a file called
          ``foo.py`` or ``foo.pyi``.
        - ``foo.bar`` to search for the ``foo`` and then an attribute ``bar``
          in it.
        - ``class foo.bar.Bar`` or ``def foo.bar.baz`` to search for a specific
          API type.

        :param bool all_scopes: Default False; searches not only for
            definitions on the top level of a module level, but also in
            functions and classes.
        :yields: :class:`.Name`
        """
        return self._search_func(string, all_scopes=all_scopes)

    def complete_search(self, string, **kwargs):
        """
        Like :meth:`.Script.search`, but completes that string. An empty string
        lists all definitions in a project, so be careful with that.

        :param bool all_scopes: Default False; searches not only for
            definitions on the top level of a module level, but also in
            functions and classes.
        :yields: :class:`.Completion`
        """
        return self._search_func(string, complete=True, **kwargs)

    @_try_to_skip_duplicates
    def _search_func(self, string, complete=False, all_scopes=False):
        # Using a Script is the easiest way to get an empty module context.
        from jedi import Script
        s = Script('', project=self)
        inference_state = s._inference_state
        empty_module_context = s._get_module_context()

        debug.dbg('Search for string %s, complete=%s', string, complete)
        wanted_type, wanted_names = split_search_string(string)
        name = wanted_names[0]
        stub_folder_name = name + '-stubs'

        ios = recurse_find_python_folders_and_files(FolderIO(str(self._path)))
        file_ios = []

        # 1. Search for modules in the current project
        for folder_io, file_io in ios:
            if file_io is None:
                file_name = folder_io.get_base_name()
                if file_name == name or file_name == stub_folder_name:
                    f = folder_io.get_file_io('__init__.py')
                    try:
                        m = load_module_from_path(inference_state, f).as_context()
                    except FileNotFoundError:
                        f = folder_io.get_file_io('__init__.pyi')
                        try:
                            m = load_module_from_path(inference_state, f).as_context()
                        except FileNotFoundError:
                            # No __init__ at all: treat as namespace package.
                            m = load_namespace_from_path(inference_state, folder_io).as_context()
                else:
                    continue
            else:
                file_ios.append(file_io)
                if Path(file_io.path).name in (name + '.py', name + '.pyi'):
                    m = load_module_from_path(inference_state, file_io).as_context()
                else:
                    continue

            debug.dbg('Search of a specific module %s', m)
            yield from search_in_module(
                inference_state,
                m,
                names=[m.name],
                wanted_type=wanted_type,
                wanted_names=wanted_names,
                complete=complete,
                convert=True,
                ignore_imports=True,
            )

        # 2. Search for identifiers in the project.
        for module_context in search_in_file_ios(inference_state, file_ios,
                                                 name, complete=complete):
            names = get_module_names(module_context.tree_node, all_scopes=all_scopes)
            names = [module_context.create_name(n) for n in names]
            names = _remove_imports(names)
            yield from search_in_module(
                inference_state,
                module_context,
                names=names,
                wanted_type=wanted_type,
                wanted_names=wanted_names,
                complete=complete,
                ignore_imports=True,
            )

        # 3. Search for modules on sys.path
        sys_path = [
            p for p in self._get_sys_path(inference_state)
            # Exclude the current folder which is handled by recursing the folders.
            if p != self._path
        ]
        names = list(iter_module_names(inference_state, empty_module_context, sys_path))
        yield from search_in_module(
            inference_state,
            empty_module_context,
            names=names,
            wanted_type=wanted_type,
            wanted_names=wanted_names,
            complete=complete,
            convert=True,
        )

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self._path)
|
| 371 |
+
|
| 372 |
+
|
| 373 |
+
def _is_potential_project(path):
    """Return True if *path* contains one of the typical project marker files."""
    for marker in _CONTAINS_POTENTIAL_PROJECT:
        try:
            found = path.joinpath(marker).exists()
        except OSError:
            continue
        if found:
            return True
    return False
|
| 381 |
+
|
| 382 |
+
|
| 383 |
+
def _is_django_path(directory):
|
| 384 |
+
""" Detects the path of the very well known Django library (if used) """
|
| 385 |
+
try:
|
| 386 |
+
with open(directory.joinpath('manage.py'), 'rb') as f:
|
| 387 |
+
return b"DJANGO_SETTINGS_MODULE" in f.read()
|
| 388 |
+
except (FileNotFoundError, IsADirectoryError, PermissionError):
|
| 389 |
+
return False
|
| 390 |
+
|
| 391 |
+
|
| 392 |
+
def get_default_project(path=None):
    """
    If a project is not defined by the user, Jedi tries to define a project by
    itself as well as possible. Jedi traverses folders until it finds one of
    the following:

    1. A ``.jedi/project.json``
    2. One of the following files: ``setup.py``, ``.git``, ``.hg``,
       ``requirements.txt``, ``MANIFEST.in`` and ``pyproject.toml``.
    """
    if path is None:
        path = Path.cwd()
    elif isinstance(path, str):
        path = Path(path)

    check = path.absolute()
    # The first directory matching _CONTAINS_POTENTIAL_PROJECT, if any.
    probable_path = None
    # The first ancestor directory that is not a Python package.
    first_no_init_file = None
    # Walk upwards from the path itself through all of its parents.
    for dir in chain([check], check.parents):
        try:
            # A saved .jedi/project.json always wins.
            return Project.load(dir)
        except (FileNotFoundError, IsADirectoryError, PermissionError):
            pass
        except NotADirectoryError:
            continue

        if first_no_init_file is None:
            if dir.joinpath('__init__.py').exists():
                # In the case that a __init__.py exists, it's in 99% just a
                # Python package and the project sits at least one level above.
                continue
            elif not dir.is_file():
                first_no_init_file = dir

        if _is_django_path(dir):
            # Django projects need the project root on sys.path.
            project = Project(dir)
            project._django = True
            return project

        if probable_path is None and _is_potential_project(dir):
            probable_path = dir

    if probable_path is not None:
        return Project(probable_path)

    if first_no_init_file is not None:
        return Project(first_no_init_file)

    # Fallback: use the directory of the given path itself.
    curdir = path if path.is_dir() else path.parent
    return Project(curdir)
|
| 442 |
+
|
| 443 |
+
|
| 444 |
+
def _remove_imports(names):
|
| 445 |
+
return [
|
| 446 |
+
n for n in names
|
| 447 |
+
if n.tree_name is None or n.api_type not in ('module', 'namespace')
|
| 448 |
+
]
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/refactoring/__init__.py
ADDED
|
@@ -0,0 +1,264 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import difflib
|
| 2 |
+
from pathlib import Path
|
| 3 |
+
from typing import Dict, Iterable, Tuple
|
| 4 |
+
|
| 5 |
+
from parso import split_lines
|
| 6 |
+
|
| 7 |
+
from jedi.api.exceptions import RefactoringError
|
| 8 |
+
from jedi.inference.value.namespace import ImplicitNSName
|
| 9 |
+
|
| 10 |
+
EXPRESSION_PARTS = (
|
| 11 |
+
'or_test and_test not_test comparison '
|
| 12 |
+
'expr xor_expr and_expr shift_expr arith_expr term factor power atom_expr'
|
| 13 |
+
).split()
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class ChangedFile:
    """
    Represents the pending modifications to a single file within a
    :class:`Refactoring`: the parsed module plus a node -> replacement map.
    """
    def __init__(self, inference_state, from_path, to_path,
                 module_node, node_to_str_map):
        self._inference_state = inference_state
        # Original path (may be None for path-less Scripts).
        self._from_path = from_path
        # Path after any renames have been applied (may be None as well).
        self._to_path = to_path
        self._module_node = module_node
        self._node_to_str_map = node_to_str_map

    def get_diff(self):
        """Return a unified diff between the old and the new file content."""
        old_lines = split_lines(self._module_node.get_code(), keepends=True)
        new_lines = split_lines(self.get_new_code(), keepends=True)

        # Add a newline at the end if it's missing. Otherwise the diff will be
        # very weird. A `diff -u file1 file2` would show the string:
        #
        #     \ No newline at end of file
        #
        # This is not necessary IMO, because Jedi does not really play with
        # newlines and the ending newline does not really matter in Python
        # files. ~dave
        if old_lines[-1] != '':
            old_lines[-1] += '\n'
        if new_lines[-1] != '':
            new_lines[-1] += '\n'

        project_path = self._inference_state.project.path
        if self._from_path is None:
            from_p = ''
        else:
            try:
                from_p = self._from_path.relative_to(project_path)
            except ValueError:  # Happens if the path is not in the project_path
                from_p = self._from_path
        if self._to_path is None:
            to_p = ''
        else:
            try:
                to_p = self._to_path.relative_to(project_path)
            except ValueError:
                to_p = self._to_path
        diff = difflib.unified_diff(
            old_lines, new_lines,
            fromfile=str(from_p),
            tofile=str(to_p),
        )
        # Apparently there's a space at the end of the diff - for whatever
        # reason.
        return ''.join(diff).rstrip(' ')

    def get_new_code(self):
        """Return the file content with all node replacements applied."""
        return self._inference_state.grammar.refactor(self._module_node, self._node_to_str_map)

    def apply(self):
        """
        Write the new content back to the original file.

        :raises RefactoringError: If the Script had no path to write to.
        """
        if self._from_path is None:
            raise RefactoringError(
                'Cannot apply a refactoring on a Script with path=None'
            )

        # newline='' keeps the line endings produced by the refactoring.
        with open(self._from_path, 'w', newline='') as f:
            f.write(self.get_new_code())

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self._from_path)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class Refactoring:
    """
    The result of a refactoring operation: per-file node changes plus file
    renames. Use :meth:`get_diff` to preview and :meth:`apply` to execute.
    """
    def __init__(self, inference_state, file_to_node_changes, renames=()):
        self._inference_state = inference_state
        # Iterable of (old_path, new_path) tuples.
        self._renames = renames
        # Mapping: path -> {tree node -> replacement string}.
        self._file_to_node_changes = file_to_node_changes

    def get_changed_files(self) -> Dict[Path, ChangedFile]:
        """Return a mapping of (old) file path to its :class:`ChangedFile`."""
        def calculate_to_path(p):
            # Apply all pending renames to the path as string prefixes.
            # NOTE: closes over `renames`, which is assigned below before
            # the dict comprehension (and therefore this function) runs.
            if p is None:
                return p
            p = str(p)
            for from_, to in renames:
                if p.startswith(str(from_)):
                    p = str(to) + p[len(str(from_)):]
            return Path(p)

        renames = self.get_renames()
        return {
            path: ChangedFile(
                self._inference_state,
                from_path=path,
                to_path=calculate_to_path(path),
                module_node=next(iter(map_)).get_root_node(),
                node_to_str_map=map_
            )
            # We need to use `or`, because the path can be None
            for path, map_ in sorted(
                self._file_to_node_changes.items(),
                key=lambda x: x[0] or Path("")
            )
        }

    def get_renames(self) -> Iterable[Tuple[Path, Path]]:
        """
        Files can be renamed in a refactoring.
        """
        return sorted(self._renames)

    def get_diff(self):
        """Return a full textual diff: rename headers plus per-file diffs."""
        text = ''
        project_path = self._inference_state.project.path
        for from_, to in self.get_renames():
            text += 'rename from %s\nrename to %s\n' \
                % (_try_relative_to(from_, project_path), _try_relative_to(to, project_path))

        return text + ''.join(f.get_diff() for f in self.get_changed_files().values())

    def apply(self):
        """
        Applies the whole refactoring to the files, which includes renames.
        """
        for f in self.get_changed_files().values():
            f.apply()

        for old, new in self.get_renames():
            old.rename(new)
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def _calculate_rename(path, new_name):
|
| 141 |
+
dir_ = path.parent
|
| 142 |
+
if path.name in ('__init__.py', '__init__.pyi'):
|
| 143 |
+
return dir_, dir_.parent.joinpath(new_name)
|
| 144 |
+
return path, dir_.joinpath(new_name + path.suffix)
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
def rename(inference_state, definitions, new_name):
    """
    Build a :class:`Refactoring` that renames all *definitions* to *new_name*,
    including file/directory renames for module and namespace definitions.

    :raises RefactoringError: If *definitions* is empty.
    """
    if not definitions:
        raise RefactoringError("There is no name under the cursor")

    renames = set()
    node_changes = {}
    for definition in definitions:
        # This private access is ok in a way. It's not public to
        # protect Jedi users from seeing it.
        name = definition._name
        tree_name = name.tree_name
        if definition.type == 'module' \
                and tree_name is None \
                and definition.module_path is not None:
            # A file/package module: rename it on the filesystem.
            renames.add(_calculate_rename(Path(definition.module_path), new_name))
        elif isinstance(name, ImplicitNSName):
            # Implicit namespace packages may span several directories.
            for dir_path in name._value.py__path__():
                renames.add(_calculate_rename(Path(dir_path), new_name))
        elif tree_name is not None:
            # An in-file name: replace the tree name, keeping its prefix.
            per_file = node_changes.setdefault(definition.module_path, {})
            per_file[tree_name] = tree_name.prefix + new_name
    return Refactoring(inference_state, node_changes, renames)
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
def inline(inference_state, names):
    """Inline a single-assignment variable into all of its references.

    ``names`` must contain exactly one definition (a plain ``=`` assignment)
    plus at least one reference; the definition statement is removed and every
    reference is replaced by the assigned expression (parenthesized where the
    surrounding syntax requires it).

    Raises ``RefactoringError`` for anything that cannot be inlined safely:
    modules/namespaces, builtins, non-assignment definitions, multiple
    targets, annotation-only or augmented assignments, or a name without
    references.
    """
    if not names:
        raise RefactoringError("There is no name under the cursor")
    if any(n.api_type in ('module', 'namespace') for n in names):
        raise RefactoringError("Cannot inline imports, modules or namespaces")
    if any(n.tree_name is None for n in names):
        raise RefactoringError("Cannot inline builtins/extensions")

    definitions = [n for n in names if n.tree_name.is_definition()]
    if len(definitions) == 0:
        raise RefactoringError("No definition found to inline")
    if len(definitions) > 1:
        raise RefactoringError("Cannot inline a name with multiple definitions")
    if len(names) == 1:
        raise RefactoringError("There are no references to this name")

    tree_name = definitions[0].tree_name

    expr_stmt = tree_name.get_definition()
    if expr_stmt.type != 'expr_stmt':
        # Only plain assignment statements can be inlined; translate parser
        # node types into user-friendly words for the error message.
        type_ = dict(
            funcdef='function',
            classdef='class',
        ).get(expr_stmt.type, expr_stmt.type)
        raise RefactoringError("Cannot inline a %s" % type_)

    if len(expr_stmt.get_defined_names(include_setitem=True)) > 1:
        raise RefactoringError("Cannot inline a statement with multiple definitions")
    first_child = expr_stmt.children[1]
    if first_child.type == 'annassign' and len(first_child.children) == 4:
        # ``x: int = 3`` -> inspect the ``=`` inside the annotated assignment.
        first_child = first_child.children[2]
    if first_child != '=':
        if first_child.type == 'annassign':
            raise RefactoringError(
                'Cannot inline a statement that is defined by an annotation'
            )
        else:
            # e.g. augmented assignments such as ``x += 3``.
            raise RefactoringError(
                'Cannot inline a statement with "%s"'
                % first_child.get_code(include_prefix=False)
            )

    rhs = expr_stmt.get_rhs()
    replace_code = rhs.get_code(include_prefix=False)

    references = [n for n in names if not n.tree_name.is_definition()]
    file_to_node_changes = {}
    for name in references:
        tree_name = name.tree_name
        path = name.get_root_context().py__file__()
        s = replace_code
        # Parenthesize when the inlined expression could bind differently at
        # the reference site: tuple expressions, operator expressions, or a
        # call/attribute chain that continues after the name.
        if rhs.type == 'testlist_star_expr' \
                or tree_name.parent.type in EXPRESSION_PARTS \
                or tree_name.parent.type == 'trailer' \
                and tree_name.parent.get_next_sibling() is not None:
            s = '(' + replace_code + ')'

        of_path = file_to_node_changes.setdefault(path, {})

        n = tree_name
        prefix = n.prefix
        par = n.parent
        if par.type == 'trailer' and par.children[0] == '.':
            # For ``obj.attr`` references, replace the whole trailer and drop
            # every node that precedes it in the attribute access chain.
            prefix = par.parent.children[0].prefix
            n = par
            for some_node in par.parent.children[:par.parent.children.index(par)]:
                of_path[some_node] = ''
        of_path[n] = prefix + s

    path = definitions[0].get_root_context().py__file__()
    changes = file_to_node_changes.setdefault(path, {})
    # Remove the definition statement itself, keeping any leading comments.
    changes[expr_stmt] = _remove_indent_of_prefix(expr_stmt.get_first_leaf().prefix)
    next_leaf = expr_stmt.get_next_leaf()

    # Most of the time we have to remove the newline at the end of the
    # statement, but if there's a comment we might not need to.
    if next_leaf.prefix.strip(' \t') == '' \
            and (next_leaf.type == 'newline' or next_leaf == ';'):
        changes[next_leaf] = ''
    return Refactoring(inference_state, file_to_node_changes)
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
def _remove_indent_of_prefix(prefix):
    r"""
    Removes the last indentation of a prefix, e.g. " \n \n " becomes " \n \n".
    """
    # Everything up to (and including) the last newline is kept; the trailing
    # indentation of the final, incomplete line is dropped.
    return ''.join(split_lines(prefix, keepends=True)[:-1])
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
def _try_relative_to(path: Path, base: Path) -> Path:
|
| 261 |
+
try:
|
| 262 |
+
return path.relative_to(base)
|
| 263 |
+
except ValueError:
|
| 264 |
+
return path
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/replstartup.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
To use Jedi completion in Python interpreter, add the following in your shell
|
| 3 |
+
setup (e.g., ``.bashrc``). This works only on Linux/Mac, because readline is
|
| 4 |
+
not available on Windows. If you still want Jedi autocompletion in your REPL,
|
| 5 |
+
just use IPython instead::
|
| 6 |
+
|
| 7 |
+
export PYTHONSTARTUP="$(python -m jedi repl)"
|
| 8 |
+
|
| 9 |
+
Then you will be able to use Jedi completer in your Python interpreter::
|
| 10 |
+
|
| 11 |
+
$ python
|
| 12 |
+
Python 3.9.2+ (default, Jul 20 2020, 22:15:08)
|
| 13 |
+
[GCC 4.6.1] on linux2
|
| 14 |
+
Type "help", "copyright", "credits" or "license" for more information.
|
| 15 |
+
>>> import os
|
| 16 |
+
>>> os.path.join('a', 'b').split().in<TAB> # doctest: +SKIP
|
| 17 |
+
..dex ..sert
|
| 18 |
+
|
| 19 |
+
"""
|
| 20 |
+
import jedi.utils
|
| 21 |
+
from jedi import __version__ as __jedi_version__
|
| 22 |
+
|
| 23 |
+
print('REPL completion using Jedi %s' % __jedi_version__)
|
| 24 |
+
jedi.utils.setup_readline(fuzzy=False)
|
| 25 |
+
|
| 26 |
+
del jedi
|
| 27 |
+
|
| 28 |
+
# Note: try not to do many things here, as it will contaminate global
|
| 29 |
+
# namespace of the interpreter.
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/jedi/api/strings.py
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This module is here for string completions. This means mostly stuff where
|
| 3 |
+
strings are returned, like `foo = dict(bar=3); foo["ba` would complete to
|
| 4 |
+
`"bar"]`.
|
| 5 |
+
|
| 6 |
+
It however does the same for numbers. The difference between string completions
|
| 7 |
+
and other completions is mostly that this module doesn't return defined
|
| 8 |
+
names in a module, but pretty much an arbitrary string.
|
| 9 |
+
"""
|
| 10 |
+
import re
|
| 11 |
+
|
| 12 |
+
from jedi.inference.names import AbstractArbitraryName
|
| 13 |
+
from jedi.inference.helpers import infer_call_of_leaf
|
| 14 |
+
from jedi.api.classes import Completion
|
| 15 |
+
from jedi.parser_utils import cut_value_at_position
|
| 16 |
+
|
| 17 |
+
_sentinel = object()
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class StringName(AbstractArbitraryName):
    """Completion name representing an arbitrary string (e.g. a dict key)."""
    # These completions are raw strings, not names bound to inferable values.
    api_type = 'string'
    is_value_name = False
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def complete_dict(module_context, code_lines, leaf, position, string, fuzzy):
    """Complete dict keys for subscriptions like ``foo["ba<cursor>``.

    Returns a list of ``Completion`` objects, or ``[]`` when the cursor is
    not inside a dict subscription whose value can be inferred.
    """
    bracket_leaf = leaf
    if bracket_leaf != '[':
        # The cursor may be on the (partial) key; look back for the bracket.
        bracket_leaf = leaf.get_previous_leaf()

    cut_end_quote = ''
    if string:
        # Part of the closing quote may already exist after the cursor.
        cut_end_quote = get_quote_ending(string, code_lines, position, invert_result=True)

    if bracket_leaf == '[':
        if string is None and leaf is not bracket_leaf:
            # Unquoted key in progress, e.g. ``foo[ba``.
            string = cut_value_at_position(leaf, position)

        context = module_context.create_context(bracket_leaf)

        before_node = before_bracket_leaf = bracket_leaf.get_previous_leaf()
        if before_node in (')', ']', '}'):
            before_node = before_node.parent
        if before_node.type in ('atom', 'trailer', 'name'):
            # Infer the subscripted value so its keys can be enumerated.
            values = infer_call_of_leaf(context, before_bracket_leaf)
            return list(_completions_for_dicts(
                module_context.inference_state,
                values,
                '' if string is None else string,
                cut_end_quote,
                fuzzy=fuzzy,
            ))
    return []
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def _completions_for_dicts(inference_state, dicts, literal_string, cut_end_quote, fuzzy):
    """Yield a ``Completion`` for every dict key matching the typed prefix."""
    for dict_key in sorted(_get_python_keys(dicts), key=repr):
        key_repr = _create_repr_string(literal_string, dict_key)
        if not key_repr.startswith(literal_string):
            continue
        # Drop the trailing quote characters the buffer already contains.
        end = -len(cut_end_quote) or None
        name = StringName(inference_state, key_repr[:end])
        yield Completion(
            inference_state,
            name,
            stack=None,
            like_name_length=len(literal_string),
            is_fuzzy=fuzzy
        )
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def _create_repr_string(literal_string, dict_key):
    """Return a source representation of *dict_key* matching the user's quoting.

    Non-string keys (or an empty typed literal) use plain ``repr``; otherwise
    the repr is rewritten to reuse the string prefix and quote character the
    user has already typed.
    """
    key_repr = repr(dict_key)
    if not literal_string or not isinstance(dict_key, (str, bytes)):
        return key_repr

    prefix, quote = _get_string_prefix_and_quote(literal_string)
    if quote is None:
        return key_repr
    if quote == key_repr[0]:
        return prefix + key_repr
    # Swap the quote style of the repr to match what the user typed.
    return prefix + quote + key_repr[1:-1] + quote
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def _get_python_keys(dicts):
    """Yield the concrete (safely computable) Python keys of dict values."""
    for value in dicts:
        if value.array_type != 'dict':
            continue
        for key_value in value.get_key_values():
            safe_key = key_value.get_safe_value(default=_sentinel)
            # Keys whose value cannot be safely computed are skipped.
            if safe_key is not _sentinel:
                yield safe_key
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def _get_string_prefix_and_quote(string):
|
| 92 |
+
match = re.match(r'(\w*)("""|\'{3}|"|\')', string)
|
| 93 |
+
if match is None:
|
| 94 |
+
return None, None
|
| 95 |
+
return match.group(1), match.group(2)
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
def _matches_quote_at_position(code_lines, quote, position):
|
| 99 |
+
string = code_lines[position[0] - 1][position[1]:position[1] + len(quote)]
|
| 100 |
+
return string == quote
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def get_quote_ending(string, code_lines, position, invert_result=False):
    """Return the quote needed to close *string*, or ``''`` if none is needed.

    With ``invert_result=True`` the presence check is flipped; that mode is
    used to measure how much of an existing closing quote must be cut.
    """
    quote = _get_string_prefix_and_quote(string)[1]
    if quote is None:
        return ''

    # Add a quote only if it's not already there.
    already_there = _matches_quote_at_position(code_lines, quote, position)
    if already_there != invert_result:
        return ''
    return quote
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/_cloudpickle_wrapper.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Small shim of loky's cloudpickle_wrapper to avoid failure when
|
| 3 |
+
multiprocessing is not available.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
from ._multiprocessing_helpers import mp
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def _my_wrap_non_picklable_objects(obj, keep_wrapper=True):
    """No-op fallback for loky's ``wrap_non_picklable_objects``.

    Used when multiprocessing is unavailable; ``keep_wrapper`` is accepted
    only for signature compatibility and is ignored.
    """
    return obj
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
if mp is not None:
|
| 15 |
+
from .externals.loky import wrap_non_picklable_objects
|
| 16 |
+
else:
|
| 17 |
+
wrap_non_picklable_objects = _my_wrap_non_picklable_objects
|
| 18 |
+
|
| 19 |
+
__all__ = ["wrap_non_picklable_objects"]
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/_memmapping_reducer.py
ADDED
|
@@ -0,0 +1,657 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Reducer using memory mapping for numpy arrays
|
| 3 |
+
"""
|
| 4 |
+
# Author: Thomas Moreau <thomas.moreau.2010@gmail.com>
|
| 5 |
+
# Copyright: 2017, Thomas Moreau
|
| 6 |
+
# License: BSD 3 clause
|
| 7 |
+
|
| 8 |
+
from mmap import mmap
|
| 9 |
+
import errno
|
| 10 |
+
import os
|
| 11 |
+
import stat
|
| 12 |
+
import threading
|
| 13 |
+
import atexit
|
| 14 |
+
import tempfile
|
| 15 |
+
import time
|
| 16 |
+
import warnings
|
| 17 |
+
import weakref
|
| 18 |
+
from uuid import uuid4
|
| 19 |
+
from multiprocessing import util
|
| 20 |
+
|
| 21 |
+
from pickle import whichmodule, loads, dumps, HIGHEST_PROTOCOL, PicklingError
|
| 22 |
+
|
| 23 |
+
try:
|
| 24 |
+
WindowsError
|
| 25 |
+
except NameError:
|
| 26 |
+
WindowsError = type(None)
|
| 27 |
+
|
| 28 |
+
try:
|
| 29 |
+
import numpy as np
|
| 30 |
+
from numpy.lib.stride_tricks import as_strided
|
| 31 |
+
except ImportError:
|
| 32 |
+
np = None
|
| 33 |
+
|
| 34 |
+
from .numpy_pickle import dump, load, load_temporary_memmap
|
| 35 |
+
from .backports import make_memmap
|
| 36 |
+
from .disk import delete_folder
|
| 37 |
+
from .externals.loky.backend import resource_tracker
|
| 38 |
+
|
| 39 |
+
# Some system have a ramdisk mounted by default, we can use it instead of /tmp
|
| 40 |
+
# as the default folder to dump big arrays to share with subprocesses.
|
| 41 |
+
SYSTEM_SHARED_MEM_FS = '/dev/shm'
|
| 42 |
+
|
| 43 |
+
# Minimal number of bytes available on SYSTEM_SHARED_MEM_FS to consider using
|
| 44 |
+
# it as the default folder to dump big arrays to share with subprocesses.
|
| 45 |
+
SYSTEM_SHARED_MEM_FS_MIN_SIZE = int(2e9)
|
| 46 |
+
|
| 47 |
+
# Folder and file permissions to chmod temporary files generated by the
|
| 48 |
+
# memmapping pool. Only the owner of the Python process can access the
|
| 49 |
+
# temporary files and folder.
|
| 50 |
+
FOLDER_PERMISSIONS = stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR
|
| 51 |
+
FILE_PERMISSIONS = stat.S_IRUSR | stat.S_IWUSR
|
| 52 |
+
|
| 53 |
+
# Set used in joblib workers, referencing the filenames of temporary memmaps
|
| 54 |
+
# created by joblib to speed up data communication. In child processes, we add
|
| 55 |
+
# a finalizer to these memmaps that sends a maybe_unlink call to the
|
| 56 |
+
# resource_tracker, in order to free main memory as fast as possible.
|
| 57 |
+
JOBLIB_MMAPS = set()
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _log_and_unlink(filename):
    """Finalizer helper: ask the resource tracker to unlink *filename*.

    Runs when a joblib-created memmap is garbage collected in a worker, so
    the backing file can be deleted as soon as it is no longer referenced.
    """
    # Imported lazily to avoid a hard dependency at module import time.
    from .externals.loky.backend.resource_tracker import _resource_tracker
    util.debug(
        "[FINALIZER CALL] object mapping to {} about to be deleted,"
        " decrementing the refcount of the file (pid: {})".format(
            os.path.basename(filename), os.getpid()))
    _resource_tracker.maybe_unlink(filename, "file")
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def add_maybe_unlink_finalizer(memmap):
    """Register a finalizer on *memmap* that unlinks its backing file when
    the memmap object is garbage collected."""
    util.debug(
        "[FINALIZER ADD] adding finalizer to {} (id {}, filename {}, pid {})"
        "".format(type(memmap), id(memmap), os.path.basename(memmap.filename),
                  os.getpid()))
    # weakref.finalize keeps no strong reference to memmap, so the finalizer
    # fires exactly when the memmap is collected.
    weakref.finalize(memmap, _log_and_unlink, memmap.filename)
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def unlink_file(filename):
    """Wrapper around os.unlink with a retry mechanism.

    The retry mechanism has been implemented primarily to overcome a race
    condition happening during the finalizer of a np.memmap: when a process
    holding the last reference to a mmap-backed np.memmap/np.array is about to
    delete this array (and close the reference), it sends a maybe_unlink
    request to the resource_tracker. This request can be processed faster than
    it takes for the last reference of the memmap to be closed, yielding (on
    Windows) a PermissionError in the resource_tracker loop.
    """
    NUM_RETRIES = 10
    for retry_no in range(1, NUM_RETRIES + 1):
        try:
            os.unlink(filename)
            break
        except PermissionError:
            util.debug(
                '[ResourceTracker] tried to unlink {}, got '
                'PermissionError'.format(filename)
            )
            if retry_no == NUM_RETRIES:
                raise
            else:
                time.sleep(.2)
        except FileNotFoundError:
            # In case of a race condition when deleting the temporary folder,
            # avoid noisy FileNotFoundError exception in the resource tracker.
            # The file is already gone, so there is nothing left to retry
            # (the previous implementation fell through and pointlessly
            # re-attempted the unlink on every remaining iteration).
            break
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
resource_tracker._CLEANUP_FUNCS['file'] = unlink_file
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
class _WeakArrayKeyMap:
    """A variant of weakref.WeakKeyDictionary for unhashable numpy arrays.

    This datastructure will be used with numpy arrays as obj keys, therefore we
    do not use the __get__ / __set__ methods to avoid any conflict with the
    numpy fancy indexing syntax.
    """

    def __init__(self):
        # Maps id(obj) -> (weakref_to_obj, value). The weakref lets us detect
        # id() reuse after the original object has been collected.
        self._data = {}

    def get(self, obj):
        """Return the value stored for *obj*; raise KeyError if absent."""
        ref, val = self._data[id(obj)]
        if ref() is not obj:
            # In case of race condition with on_destroy: could never be
            # triggered by the joblib tests with CPython.
            raise KeyError(obj)
        return val

    def set(self, obj, value):
        """Associate *value* with *obj*, keyed by the object's identity."""
        key = id(obj)
        try:
            ref, _ = self._data[key]
            if ref() is not obj:
                # In case of race condition with on_destroy: could never be
                # triggered by the joblib tests with CPython.
                raise KeyError(obj)
        except KeyError:
            # Insert the new entry in the mapping along with a weakref
            # callback to automatically delete the entry from the mapping
            # as soon as the object used as key is garbage collected.
            def on_destroy(_):
                del self._data[key]
            ref = weakref.ref(obj, on_destroy)
        self._data[key] = ref, value

    def __getstate__(self):
        # The map is keyed by object identity, which is meaningless in
        # another process; refuse to be pickled.
        raise PicklingError("_WeakArrayKeyMap is not pickleable")
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
###############################################################################
|
| 152 |
+
# Support for efficient transient pickling of numpy data structures
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
def _get_backing_memmap(a):
|
| 156 |
+
"""Recursively look up the original np.memmap instance base if any."""
|
| 157 |
+
b = getattr(a, 'base', None)
|
| 158 |
+
if b is None:
|
| 159 |
+
# TODO: check scipy sparse datastructure if scipy is installed
|
| 160 |
+
# a nor its descendants do not have a memmap base
|
| 161 |
+
return None
|
| 162 |
+
|
| 163 |
+
elif isinstance(b, mmap):
|
| 164 |
+
# a is already a real memmap instance.
|
| 165 |
+
return a
|
| 166 |
+
|
| 167 |
+
else:
|
| 168 |
+
# Recursive exploration of the base ancestry
|
| 169 |
+
return _get_backing_memmap(b)
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def _get_temp_dir(pool_folder_name, temp_folder=None):
    """Get the full path to a subfolder inside the temporary folder.

    Parameters
    ----------
    pool_folder_name : str
        Sub-folder name used for the serialization of a pool instance.

    temp_folder: str, optional
        Folder to be used by the pool for memmapping large arrays
        for sharing memory with worker processes. If None, this will try in
        order:

        - a folder pointed by the JOBLIB_TEMP_FOLDER environment
          variable,
        - /dev/shm if the folder exists and is writable: this is a
          RAMdisk filesystem available by default on modern Linux
          distributions,
        - the default system temporary folder that can be
          overridden with TMP, TMPDIR or TEMP environment
          variables, typically /tmp under Unix operating systems.

    Returns
    -------
    pool_folder : str
        full path to the temporary folder
    use_shared_mem : bool
        whether the temporary folder is written to the system shared memory
        folder or some other temporary folder.
    """
    use_shared_mem = False
    if temp_folder is None:
        temp_folder = os.environ.get('JOBLIB_TEMP_FOLDER', None)
    if temp_folder is None:
        # No explicit folder requested: consider the shared-memory RAMdisk.
        if os.path.exists(SYSTEM_SHARED_MEM_FS) and hasattr(os, 'statvfs'):
            try:
                shm_stats = os.statvfs(SYSTEM_SHARED_MEM_FS)
                available_nbytes = shm_stats.f_bsize * shm_stats.f_bavail
                if available_nbytes > SYSTEM_SHARED_MEM_FS_MIN_SIZE:
                    # Try to see if we have write access to the shared mem
                    # folder only if it is reasonably large (that is 2GB or
                    # more).
                    temp_folder = SYSTEM_SHARED_MEM_FS
                    pool_folder = os.path.join(temp_folder, pool_folder_name)
                    if not os.path.exists(pool_folder):
                        # Creating the folder doubles as the write-access probe.
                        os.makedirs(pool_folder)
                    use_shared_mem = True
            except (IOError, OSError):
                # Missing rights in the /dev/shm partition, fallback to regular
                # temp folder.
                temp_folder = None
    if temp_folder is None:
        # Fallback to the default tmp folder, typically /tmp
        temp_folder = tempfile.gettempdir()
    temp_folder = os.path.abspath(os.path.expanduser(temp_folder))
    pool_folder = os.path.join(temp_folder, pool_folder_name)
    return pool_folder, use_shared_mem
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
def has_shareable_memory(a):
    """Tell whether *a* (or one of its bases) is backed by an mmap buffer."""
    backing = _get_backing_memmap(a)
    return backing is not None
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
def _strided_from_memmap(filename, dtype, mode, offset, order, shape, strides,
                         total_buffer_len, unlink_on_gc_collect):
    """Reconstruct an array view on a memory mapped file.

    Used as the unpickling callable produced by ``_reduce_memmap_backed``:
    it re-opens *filename* as a memmap and, for non-contiguous views,
    rebuilds the original striding on top of the enclosing buffer.
    """
    if mode == 'w+':
        # Do not zero the original data when unpickling
        mode = 'r+'

    if strides is None:
        # Simple, contiguous memmap
        return make_memmap(
            filename, dtype=dtype, shape=shape, mode=mode, offset=offset,
            order=order, unlink_on_gc_collect=unlink_on_gc_collect
        )
    else:
        # For non-contiguous data, memmap the total enclosing buffer and then
        # extract the non-contiguous view with the stride-tricks API
        base = make_memmap(
            filename, dtype=dtype, shape=total_buffer_len, offset=offset,
            mode=mode, order=order, unlink_on_gc_collect=unlink_on_gc_collect
        )
        return as_strided(base, shape=shape, strides=strides)
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
def _reduce_memmap_backed(a, m):
    """Pickling reduction for memmap backed arrays.

    a is expected to be an instance of np.ndarray (or np.memmap)
    m is expected to be an instance of np.memmap on the top of the ``base``
    attribute ancestry of a. ``m.base`` should be the real python mmap object.

    Returns a ``(callable, args)`` pair suitable as a ``__reduce__`` result;
    unpickling re-opens the file via ``_strided_from_memmap`` instead of
    copying the data.
    """
    # offset that comes from the striding differences between a and m
    util.debug('[MEMMAP REDUCE] reducing a memmap-backed array '
               '(shape, {}, pid: {})'.format(a.shape, os.getpid()))
    try:
        from numpy.lib.array_utils import byte_bounds
    except (ModuleNotFoundError, ImportError):
        # Backward-compat for numpy < 2.0
        from numpy import byte_bounds
    a_start, a_end = byte_bounds(a)
    m_start = byte_bounds(m)[0]
    offset = a_start - m_start

    # offset from the backing memmap
    offset += m.offset

    if m.flags['F_CONTIGUOUS']:
        order = 'F'
    else:
        # The backing memmap buffer is necessarily contiguous hence C if not
        # Fortran
        order = 'C'

    if a.flags['F_CONTIGUOUS'] or a.flags['C_CONTIGUOUS']:
        # If the array is a contiguous view, no need to pass the strides
        strides = None
        total_buffer_len = None
    else:
        # Compute the total number of items to map from which the strided
        # view will be extracted.
        strides = a.strides
        total_buffer_len = (a_end - a_start) // a.itemsize

    # The final False disables unlink_on_gc_collect for the reconstructed
    # memmap: file lifetime is managed by whoever created the original file.
    return (_strided_from_memmap,
            (m.filename, a.dtype, m.mode, offset, order, a.shape, strides,
             total_buffer_len, False))
|
| 301 |
+
|
| 302 |
+
|
| 303 |
+
def reduce_array_memmap_backward(a):
    """Reduce a np.array or a np.memmap when pickling from a child process
    back to the parent.

    Arrays backed by user-provided memmap files are pickled as memmap
    references; joblib's own temporary memmaps (tracked in ``JOBLIB_MMAPS``)
    and plain arrays are pickled by value.
    """
    m = _get_backing_memmap(a)
    if isinstance(m, np.memmap) and m.filename not in JOBLIB_MMAPS:
        # if a is backed by a memmaped file, reconstruct a using the
        # memmaped file.
        return _reduce_memmap_backed(a, m)
    else:
        # a is either a regular (not memmap-backed) numpy array, or an array
        # backed by a shared temporary file created by joblib. In the latter
        # case, in order to limit the lifespan of these temporary files, we
        # serialize the memmap as a regular numpy array, and decref the
        # file backing the memmap (done implicitly in a previously registered
        # finalizer, see ``unlink_on_gc_collect`` for more details)
        return (
            loads, (dumps(np.asarray(a), protocol=HIGHEST_PROTOCOL), )
        )
|
| 320 |
+
|
| 321 |
+
|
| 322 |
+
class ArrayMemmapForwardReducer(object):
|
| 323 |
+
"""Reducer callable to dump large arrays to memmap files.
|
| 324 |
+
|
| 325 |
+
Parameters
|
| 326 |
+
----------
|
| 327 |
+
max_nbytes: int
|
| 328 |
+
Threshold to trigger memmapping of large arrays to files created
|
| 329 |
+
a folder.
|
| 330 |
+
temp_folder_resolver: callable
|
| 331 |
+
An callable in charge of resolving a temporary folder name where files
|
| 332 |
+
for backing memmapped arrays are created.
|
| 333 |
+
mmap_mode: 'r', 'r+' or 'c'
|
| 334 |
+
Mode for the created memmap datastructure. See the documentation of
|
| 335 |
+
numpy.memmap for more details. Note: 'w+' is coerced to 'r+'
|
| 336 |
+
automatically to avoid zeroing the data on unpickling.
|
| 337 |
+
verbose: int, optional, 0 by default
|
| 338 |
+
If verbose > 0, memmap creations are logged.
|
| 339 |
+
If verbose > 1, both memmap creations, reuse and array pickling are
|
| 340 |
+
logged.
|
| 341 |
+
prewarm: bool, optional, False by default.
|
| 342 |
+
Force a read on newly memmapped array to make sure that OS pre-cache it
|
| 343 |
+
memory. This can be useful to avoid concurrent disk access when the
|
| 344 |
+
same data array is passed to different worker processes.
|
| 345 |
+
"""
|
| 346 |
+
|
| 347 |
+
def __init__(self, max_nbytes, temp_folder_resolver, mmap_mode,
|
| 348 |
+
unlink_on_gc_collect, verbose=0, prewarm=True):
|
| 349 |
+
self._max_nbytes = max_nbytes
|
| 350 |
+
self._temp_folder_resolver = temp_folder_resolver
|
| 351 |
+
self._mmap_mode = mmap_mode
|
| 352 |
+
self.verbose = int(verbose)
|
| 353 |
+
if prewarm == "auto":
|
| 354 |
+
self._prewarm = not self._temp_folder.startswith(
|
| 355 |
+
SYSTEM_SHARED_MEM_FS
|
| 356 |
+
)
|
| 357 |
+
else:
|
| 358 |
+
self._prewarm = prewarm
|
| 359 |
+
self._prewarm = prewarm
|
| 360 |
+
self._memmaped_arrays = _WeakArrayKeyMap()
|
| 361 |
+
self._temporary_memmaped_filenames = set()
|
| 362 |
+
self._unlink_on_gc_collect = unlink_on_gc_collect
|
| 363 |
+
|
| 364 |
+
    @property
    def _temp_folder(self):
        # Resolved lazily on every access so the folder choice can depend on
        # runtime state held by the resolver callable.
        return self._temp_folder_resolver()
|
| 367 |
+
|
| 368 |
+
def __reduce__(self):
    """Pickle support for shipping this reducer to worker processes.

    The ``_WeakArrayKeyMap`` cache is deliberately dropped: it is only
    consistent with the parent process' garbage collection.  The temp
    folder resolver is likewise replaced by ``None`` because it is both
    unpicklable and unused on the receiving side — this reducer only
    forwards memmaps from the parent to the children, never back.
    """
    constructor_args = (
        self._max_nbytes,
        None,  # skip the (unpicklable, unneeded) temp folder resolver
        self._mmap_mode,
        self._unlink_on_gc_collect,
    )
    constructor_kwargs = {
        'verbose': self.verbose,
        'prewarm': self._prewarm,
    }
    return ArrayMemmapForwardReducer, constructor_args, constructor_kwargs
|
| 385 |
+
|
| 386 |
+
def __call__(self, a):
    """Reduce (pickle) array ``a``, memmapping it when appropriate.

    Three cases:
    - ``a`` is already backed by a memmap file: forward a reference to
      that file instead of copying the data;
    - ``a`` is a plain in-memory array above ``self._max_nbytes`` (and
      without object dtype): dump it once to a memmap file inside the
      temporary folder and forward the filename;
    - otherwise: fall back to a regular in-memory pickle.
    """
    m = _get_backing_memmap(a)
    if m is not None and isinstance(m, np.memmap):
        # a is already backed by a memmap file, let's reuse it directly
        return _reduce_memmap_backed(a, m)

    if (not a.dtype.hasobject and self._max_nbytes is not None and
            a.nbytes > self._max_nbytes):
        # check that the folder exists (lazily create the pool temp folder
        # if required)
        try:
            os.makedirs(self._temp_folder)
            os.chmod(self._temp_folder, FOLDER_PERMISSIONS)
        except OSError as e:
            # EEXIST means another thread/process created the folder
            # first: that is fine, only re-raise other errors.
            if e.errno != errno.EEXIST:
                raise e

        try:
            basename = self._memmaped_arrays.get(a)
        except KeyError:
            # Generate a new unique random filename. The process and thread
            # ids are only useful for debugging purpose and to make it
            # easier to cleanup orphaned files in case of hard process
            # kill (e.g. by "kill -9" or segfault).
            basename = "{}-{}-{}.pkl".format(
                os.getpid(), id(threading.current_thread()), uuid4().hex)
            self._memmaped_arrays.set(a, basename)
        filename = os.path.join(self._temp_folder, basename)

        # In case the same array with the same content is passed several
        # times to the pool subprocess children, serialize it only once

        is_new_memmap = filename not in self._temporary_memmaped_filenames

        # add the memmap to the list of temporary memmaps created by joblib
        self._temporary_memmaped_filenames.add(filename)

        if self._unlink_on_gc_collect:
            # Bump reference count of the memmap by 1 to account for
            # shared usage of the memmap by a child process. The
            # corresponding decref call will be executed upon calling
            # resource_tracker.maybe_unlink, registered as a finalizer in
            # the child.
            # the incref/decref calls here are only possible when the child
            # and the parent share the same resource_tracker. It is not the
            # case for the multiprocessing backend, but it does not matter
            # because unlinking a memmap from a child process is only
            # useful to control the memory usage of long-lasting child
            # processes, while the multiprocessing-based pools terminate
            # their workers at the end of a map() call.
            resource_tracker.register(filename, "file")

        if is_new_memmap:
            # Incref each temporary memmap created by joblib one extra
            # time. This means that these memmaps will only be deleted
            # once an extra maybe_unlink() is called, which is done once
            # all the jobs have completed (or been canceled) in the
            # Parallel._terminate_backend() method.
            resource_tracker.register(filename, "file")

        if not os.path.exists(filename):
            util.debug(
                "[ARRAY DUMP] Pickling new array (shape={}, dtype={}) "
                "creating a new memmap at {}".format(
                    a.shape, a.dtype, filename))
            for dumped_filename in dump(a, filename):
                os.chmod(dumped_filename, FILE_PERMISSIONS)

            if self._prewarm:
                # Warm up the data by accessing it. This operation ensures
                # that the disk access required to create the memmapping
                # file are performed in the reducing process and avoids
                # concurrent memmap creation in multiple children
                # processes.
                load(filename, mmap_mode=self._mmap_mode).max()

        else:
            util.debug(
                "[ARRAY DUMP] Pickling known array (shape={}, dtype={}) "
                "reusing memmap file: {}".format(
                    a.shape, a.dtype, os.path.basename(filename)))

        # The worker process will use joblib.load to memmap the data
        return (
            (load_temporary_memmap, (filename, self._mmap_mode,
                                     self._unlink_on_gc_collect))
        )
    else:
        # do not convert a into memmap, let pickler do its usual copy with
        # the default system pickler
        util.debug(
            '[ARRAY DUMP] Pickling array (NO MEMMAPPING) (shape={}, '
            ' dtype={}).'.format(a.shape, a.dtype))
        return (loads, (dumps(a, protocol=HIGHEST_PROTOCOL),))
|
| 480 |
+
|
| 481 |
+
|
| 482 |
+
def get_memmapping_reducers(
        forward_reducers=None, backward_reducers=None,
        temp_folder_resolver=None, max_nbytes=1e6, mmap_mode='r', verbose=0,
        prewarm=False, unlink_on_gc_collect=True, **kwargs):
    """Construct a pair of memmapping reducer linked to a tmpdir.

    This function manage the creation and the clean up of the temporary folders
    underlying the memory maps and should be use to get the reducers necessary
    to construct joblib pool or executor.
    """
    forward_reducers = {} if forward_reducers is None else forward_reducers
    backward_reducers = {} if backward_reducers is None else backward_reducers

    if np is not None:
        # Smart reducer for numpy arrays: forwards memmap-backed arrays by
        # reference and is also able to spill large in-memory arrays (above
        # the max_nbytes threshold) to memmap files.
        ndarray_reducer = ArrayMemmapForwardReducer(
            max_nbytes, temp_folder_resolver, mmap_mode, unlink_on_gc_collect,
            verbose, prewarm=prewarm)
        for array_type in (np.ndarray, np.memmap):
            forward_reducers[array_type] = ndarray_reducer
            # Communication from child process to the parent process always
            # pickles in-memory numpy.ndarray without dumping them as memmap
            # to avoid confusing the caller and make it tricky to collect the
            # temporary folder.
            backward_reducers[array_type] = reduce_array_memmap_backward

    return forward_reducers, backward_reducers
|
| 515 |
+
|
| 516 |
+
|
| 517 |
+
class TemporaryResourcesManager(object):
    """Stateful object able to manage temporary folder and pickles

    It exposes:
    - a per-context folder name resolving API that memmap-based reducers will
      rely on to know where to pickle the temporary memmaps
    - a temporary file/folder management API that internally uses the
      resource_tracker.
    """

    def __init__(self, temp_folder_root=None, context_id=None):
        """Create a manager rooted at ``temp_folder_root`` (or the default).

        If ``context_id`` is None a fresh random one is generated.
        """
        self._current_temp_folder = None
        self._temp_folder_root = temp_folder_root
        self._use_shared_mem = None
        # context_id -> temp folder path, lazily populated.
        self._cached_temp_folders = dict()
        # Manager-specific id, mixed into folder names (see
        # register_new_context for the rationale).
        self._id = uuid4().hex
        # context_id -> atexit finalizer, so that finalizers can be
        # canceled once the folder has been cleaned up explicitly.
        self._finalizers = {}
        if context_id is None:
            # It would be safer to not assign a default context id (less silent
            # bugs), but doing this while maintaining backward compatibility
            # with the previous, context-unaware version get_memmaping_executor
            # exposes too many low-level details.
            context_id = uuid4().hex
        self.set_current_context(context_id)

    def set_current_context(self, context_id):
        """Activate ``context_id``, registering it if needed."""
        self._current_context_id = context_id
        self.register_new_context(context_id)

    def register_new_context(self, context_id):
        """Pre-compute (but do not create) the folder for ``context_id``."""
        # Prepare a sub-folder name specific to a context (usually a unique id
        # generated by each instance of the Parallel class). Do not create in
        # advance to spare FS write access if no array is to be dumped).
        if context_id in self._cached_temp_folders:
            return
        else:
            # During its lifecycle, one Parallel object can have several
            # executors associated to it (for instance, if a loky worker raises
            # an exception, joblib shutdowns the executor and instantly
            # recreates a new one before raising the error - see
            # ``ensure_ready``. Because we don't want two executors tied to
            # the same Parallel object (and thus the same context id) to
            # register/use/delete the same folder, we also add an id specific
            # to the current Manager (and thus specific to its associated
            # executor) to the folder name.
            new_folder_name = (
                "joblib_memmapping_folder_{}_{}_{}".format(
                    os.getpid(), self._id, context_id)
            )
            new_folder_path, _ = _get_temp_dir(
                new_folder_name, self._temp_folder_root
            )
            self.register_folder_finalizer(new_folder_path, context_id)
            self._cached_temp_folders[context_id] = new_folder_path

    def resolve_temp_folder_name(self):
        """Return a folder name specific to the currently activated context"""
        return self._cached_temp_folders[self._current_context_id]

    # resource management API

    def register_folder_finalizer(self, pool_subfolder, context_id):
        """Ensure ``pool_subfolder`` is deleted at interpreter exit."""
        # Register the garbage collector at program exit in case caller forgets
        # to call terminate explicitly: note we do not pass any reference to
        # ensure that this callback won't prevent garbage collection of
        # parallel instance and related file handler resources such as POSIX
        # semaphores and pipes
        pool_module_name = whichmodule(delete_folder, 'delete_folder')
        resource_tracker.register(pool_subfolder, "folder")

        def _cleanup():
            # In some cases the Python runtime seems to set delete_folder to
            # None just before exiting when accessing the delete_folder
            # function from the closure namespace. So instead we reimport
            # the delete_folder function explicitly.
            # https://github.com/joblib/joblib/issues/328
            # We cannot just use from 'joblib.pool import delete_folder'
            # because joblib should only use relative imports to allow
            # easy vendoring.
            delete_folder = __import__(
                pool_module_name, fromlist=['delete_folder']
            ).delete_folder
            try:
                delete_folder(pool_subfolder, allow_non_empty=True)
                resource_tracker.unregister(pool_subfolder, "folder")
            except OSError:
                warnings.warn("Failed to delete temporary folder: {}"
                              .format(pool_subfolder))

        self._finalizers[context_id] = atexit.register(_cleanup)

    def _clean_temporary_resources(self, context_id=None, force=False,
                                   allow_non_empty=False):
        """Clean temporary resources created by a process-based pool"""
        if context_id is None:
            # Iterates over a copy of the cache keys to avoid Error due to
            # iterating over a changing size dictionary.
            for context_id in list(self._cached_temp_folders):
                self._clean_temporary_resources(
                    context_id, force=force, allow_non_empty=allow_non_empty
                )
        else:
            temp_folder = self._cached_temp_folders.get(context_id)
            if temp_folder and os.path.exists(temp_folder):
                for filename in os.listdir(temp_folder):
                    if force:
                        # Some workers have failed and the ref counted might
                        # be off. The workers should have shut down by this
                        # time so forcefully clean up the files.
                        resource_tracker.unregister(
                            os.path.join(temp_folder, filename), "file"
                        )
                    else:
                        resource_tracker.maybe_unlink(
                            os.path.join(temp_folder, filename), "file"
                        )

                # When forcing clean-up, try to delete the folder even if some
                # files are still in it. Otherwise, try to delete the folder
                allow_non_empty |= force

                # Clean up the folder if possible, either if it is empty or
                # if none of the files in it are in used and allow_non_empty.
                try:
                    delete_folder(
                        temp_folder, allow_non_empty=allow_non_empty
                    )
                    # Forget the folder once it has been deleted
                    self._cached_temp_folders.pop(context_id, None)
                    resource_tracker.unregister(temp_folder, "folder")

                    # Also cancel the finalizers that gets triggered at gc.
                    finalizer = self._finalizers.pop(context_id, None)
                    if finalizer is not None:
                        atexit.unregister(finalizer)

                except OSError:
                    # Temporary folder cannot be deleted right now.
                    # This folder will be cleaned up by an atexit
                    # finalizer registered by the memmapping_reducer.
                    pass
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/_parallel_backends.py
ADDED
|
@@ -0,0 +1,649 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Backends for embarrassingly parallel code.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import gc
|
| 6 |
+
import os
|
| 7 |
+
import warnings
|
| 8 |
+
import threading
|
| 9 |
+
import contextlib
|
| 10 |
+
from abc import ABCMeta, abstractmethod
|
| 11 |
+
|
| 12 |
+
from ._utils import (
|
| 13 |
+
_TracebackCapturingWrapper,
|
| 14 |
+
_retrieve_traceback_capturing_wrapped_call
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
from ._multiprocessing_helpers import mp
|
| 18 |
+
|
| 19 |
+
if mp is not None:
|
| 20 |
+
from .pool import MemmappingPool
|
| 21 |
+
from multiprocessing.pool import ThreadPool
|
| 22 |
+
from .executor import get_memmapping_executor
|
| 23 |
+
|
| 24 |
+
# Import loky only if multiprocessing is present
|
| 25 |
+
from .externals.loky import process_executor, cpu_count
|
| 26 |
+
from .externals.loky.process_executor import ShutdownExecutorError
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class ParallelBackendBase(metaclass=ABCMeta):
    """Helper abc which defines all methods a ParallelBackend must implement"""

    # Whether the backend can forward an inner_max_num_threads limit to its
    # worker processes (see _prepare_worker_env).
    supports_inner_max_num_threads = False
    # Whether results can be retrieved through the callback passed to
    # apply_async; generator output and timeouts both depend on it.
    supports_retrieve_callback = False
    default_n_jobs = 1

    @property
    def supports_return_generator(self):
        # Generator-style retrieval requires callback-based result delivery.
        return self.supports_retrieve_callback

    @property
    def supports_timeout(self):
        # Timeouts are only implementable with callback-based retrieval.
        return self.supports_retrieve_callback

    # Depth of nested parallelism; None until set by the constructor.
    nesting_level = None

    def __init__(self, nesting_level=None, inner_max_num_threads=None,
                 **kwargs):
        super().__init__(**kwargs)
        self.nesting_level = nesting_level
        self.inner_max_num_threads = inner_max_num_threads

    # Environment variables recognized by the common native threadpool
    # libraries (OpenMP, OpenBLAS, MKL, BLIS, Accelerate, numba, numexpr).
    MAX_NUM_THREADS_VARS = [
        'OMP_NUM_THREADS', 'OPENBLAS_NUM_THREADS', 'MKL_NUM_THREADS',
        'BLIS_NUM_THREADS', 'VECLIB_MAXIMUM_THREADS', 'NUMBA_NUM_THREADS',
        'NUMEXPR_NUM_THREADS',
    ]

    TBB_ENABLE_IPC_VAR = "ENABLE_IPC"

    @abstractmethod
    def effective_n_jobs(self, n_jobs):
        """Determine the number of jobs that can actually run in parallel

        n_jobs is the number of workers requested by the callers. Passing
        n_jobs=-1 means requesting all available workers for instance matching
        the number of CPU cores on the worker host(s).

        This method should return a guesstimate of the number of workers that
        can actually perform work concurrently. The primary use case is to make
        it possible for the caller to know in how many chunks to slice the
        work.

        In general working on larger data chunks is more efficient (less
        scheduling overhead and better use of CPU cache prefetching heuristics)
        as long as all the workers have enough work to do.
        """

    @abstractmethod
    def apply_async(self, func, callback=None):
        """Schedule a func to be run"""

    def retrieve_result_callback(self, out):
        """Called within the callback function passed in apply_async.

        The argument of this function is the argument given to a callback in
        the considered backend. It is supposed to return the outcome of a task
        if it succeeded or raise the exception if it failed.
        """

    def configure(self, n_jobs=1, parallel=None, prefer=None, require=None,
                  **backend_args):
        """Reconfigure the backend and return the number of workers.

        This makes it possible to reuse an existing backend instance for
        successive independent calls to Parallel with different parameters.
        """
        self.parallel = parallel
        return self.effective_n_jobs(n_jobs)

    def start_call(self):
        """Call-back method called at the beginning of a Parallel call"""

    def stop_call(self):
        """Call-back method called at the end of a Parallel call"""

    def terminate(self):
        """Shutdown the workers and free the shared memory."""

    def compute_batch_size(self):
        """Determine the optimal batch size"""
        return 1

    def batch_completed(self, batch_size, duration):
        """Callback indicate how long it took to run a batch"""

    def get_exceptions(self):
        """List of exception types to be captured."""
        return []

    def abort_everything(self, ensure_ready=True):
        """Abort any running tasks

        This is called when an exception has been raised when executing a task
        and all the remaining tasks will be ignored and can therefore be
        aborted to spare computation resources.

        If ensure_ready is True, the backend should be left in an operating
        state as future tasks might be re-submitted via that same backend
        instance.

        If ensure_ready is False, the implementer of this method can decide
        to leave the backend in a closed / terminated state as no new task
        are expected to be submitted to this backend.

        Setting ensure_ready to False is an optimization that can be leveraged
        when aborting tasks via killing processes from a local process pool
        managed by the backend it-self: if we expect no new tasks, there is no
        point in re-creating new workers.
        """
        # Does nothing by default: to be overridden in subclasses when
        # canceling tasks is possible.
        pass

    def get_nested_backend(self):
        """Backend instance to be used by nested Parallel calls.

        By default a thread-based backend is used for the first level of
        nesting. Beyond, switch to sequential backend to avoid spawning too
        many threads on the host.
        """
        nesting_level = getattr(self, 'nesting_level', 0) + 1
        if nesting_level > 1:
            return SequentialBackend(nesting_level=nesting_level), None
        else:
            return ThreadingBackend(nesting_level=nesting_level), None

    @contextlib.contextmanager
    def retrieval_context(self):
        """Context manager to manage an execution context.

        Calls to Parallel.retrieve will be made inside this context.

        By default, this does nothing. It may be useful for subclasses to
        handle nested parallelism. In particular, it may be required to avoid
        deadlocks if a backend manages a fixed number of workers, when those
        workers may be asked to do nested Parallel calls. Without
        'retrieval_context' this could lead to deadlock, as all the workers
        managed by the backend may be "busy" waiting for the nested parallel
        calls to finish, but the backend has no free workers to execute those
        tasks.
        """
        yield

    def _prepare_worker_env(self, n_jobs):
        """Return environment variables limiting threadpools in external libs.

        This function return a dict containing environment variables to pass
        when creating a pool of process. These environment variables limit the
        number of threads to `n_threads` for OpenMP, MKL, Accelerated and
        OpenBLAS libraries in the child processes.
        """
        explicit_n_threads = self.inner_max_num_threads
        # Fair share of the host cores between the n_jobs workers.
        default_n_threads = max(cpu_count() // n_jobs, 1)

        # Set the inner environment variables to self.inner_max_num_threads if
        # it is given. Else, default to cpu_count // n_jobs unless the variable
        # is already present in the parent process environment.
        env = {}
        for var in self.MAX_NUM_THREADS_VARS:
            if explicit_n_threads is None:
                var_value = os.environ.get(var, default_n_threads)
            else:
                var_value = explicit_n_threads

            env[var] = str(var_value)

        if self.TBB_ENABLE_IPC_VAR not in os.environ:
            # To avoid over-subscription when using TBB, let the TBB schedulers
            # use Inter Process Communication to coordinate:
            env[self.TBB_ENABLE_IPC_VAR] = "1"
        return env

    @staticmethod
    def in_main_thread():
        # NOTE(review): relies on the private threading._MainThread class;
        # comparing against threading.main_thread() would avoid the private
        # API — confirm before changing, subclass threads may matter.
        return isinstance(threading.current_thread(), threading._MainThread)
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
class SequentialBackend(ParallelBackendBase):
    """Backend executing every batch in the calling thread, one at a time.

    It creates no threading objects at all, and therefore has minimal
    overhead.  This is the backend used when n_jobs == 1.
    """

    uses_threads = True
    supports_timeout = False
    supports_retrieve_callback = False
    supports_sharedmem = True

    def effective_n_jobs(self, n_jobs):
        """Determine the number of jobs which are going to run in parallel"""
        if n_jobs != 0:
            return 1
        raise ValueError('n_jobs == 0 in Parallel has no meaning')

    def apply_async(self, func, callback=None):
        """Schedule a func to be run"""
        raise RuntimeError("Should never be called for SequentialBackend.")

    def retrieve_result_callback(self, out):
        raise RuntimeError("Should never be called for SequentialBackend.")

    def get_nested_backend(self):
        # import is not top level to avoid cyclic import errors.
        from .parallel import get_active_backend

        # A sequential run must alter neither the nesting level, the active
        # backend, nor the number of jobs: simply forward the current one.
        return get_active_backend()
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
class PoolManagerMixin(object):
    """Mixin gathering the pool-management logic shared by pool backends."""

    _pool = None

    def effective_n_jobs(self, n_jobs):
        """Determine the number of jobs which are going to run in parallel"""
        if n_jobs == 0:
            raise ValueError('n_jobs == 0 in Parallel has no meaning')
        if mp is None or n_jobs is None:
            # multiprocessing is not available or disabled, fallback
            # to sequential mode
            return 1
        if n_jobs < 0:
            # Negative values count back from the number of CPUs:
            # n_jobs=-1 means "all cores", -2 "all but one", etc.
            n_jobs = max(cpu_count() + 1 + n_jobs, 1)
        return n_jobs

    def terminate(self):
        """Shutdown the process or thread pool"""
        if self._pool is None:
            return
        self._pool.close()
        self._pool.terminate()  # terminate does a join()
        self._pool = None

    def _get_pool(self):
        """Used by apply_async to make it possible to implement lazy init"""
        return self._pool

    def apply_async(self, func, callback=None):
        """Schedule a func to be run"""
        # Wrap func to avoid crashes on KeyboardInterruptErrors, and fire the
        # callback on error as well so the pool never waits on a crashed job.
        wrapped_func = _TracebackCapturingWrapper(func)
        return self._get_pool().apply_async(
            wrapped_func, (),
            callback=callback, error_callback=callback
        )

    def retrieve_result_callback(self, out):
        """Mimic concurrent.futures results, raising an error if needed."""
        return _retrieve_traceback_capturing_wrapped_call(out)

    def abort_everything(self, ensure_ready=True):
        """Shutdown the pool and restart a new one with the same parameters"""
        self.terminate()
        if ensure_ready:
            self.configure(n_jobs=self.parallel.n_jobs, parallel=self.parallel,
                           **self.parallel._backend_args)
|
| 290 |
+
|
| 291 |
+
|
| 292 |
+
class AutoBatchingMixin(object):
    """A helper class for automagically batching jobs."""

    # In seconds, should be big enough to hide multiprocessing dispatching
    # overhead. This setting was found by running
    # benchmarks/bench_auto_batching.py with various parameters on various
    # platforms.
    MIN_IDEAL_BATCH_DURATION = .2

    # Should not be too high to avoid stragglers: long jobs running alone
    # on a single worker while other workers have no work to process any more.
    MAX_IDEAL_BATCH_DURATION = 2

    # Batching counters default values
    _DEFAULT_EFFECTIVE_BATCH_SIZE = 1
    _DEFAULT_SMOOTHED_BATCH_DURATION = 0.0

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._effective_batch_size = self._DEFAULT_EFFECTIVE_BATCH_SIZE
        self._smoothed_batch_duration = self._DEFAULT_SMOOTHED_BATCH_DURATION

    def compute_batch_size(self):
        """Determine the optimal batch size"""
        old_batch_size = self._effective_batch_size
        batch_duration = self._smoothed_batch_duration

        if 0 < batch_duration < self.MIN_IDEAL_BATCH_DURATION:
            # Batches complete before the scheduling overhead is amortized:
            # grow the batch size. Scale towards the ideal duration, then
            # double to limit oscillations between min and max, but never
            # grow by more than a factor of two at once so that huge batches
            # cannot starve the other workers.
            ideal_batch_size = int(old_batch_size *
                                   self.MIN_IDEAL_BATCH_DURATION /
                                   batch_duration)
            ideal_batch_size *= 2
            batch_size = max(min(2 * old_batch_size, ideal_batch_size), 1)

            self._effective_batch_size = batch_size
            if self.parallel.verbose >= 10:
                self.parallel._print(
                    f"Batch computation too fast ({batch_duration}s.) "
                    f"Setting batch_size={batch_size}."
                )
        elif (batch_duration > self.MAX_IDEAL_BATCH_DURATION
                and old_batch_size >= 2):
            # Overly long batches leave some CPUs idle while a few stragglers
            # finish: shrink the batch size quickly (scaled by the *min*
            # ideal duration on purpose, to decrease fast), times two to
            # limit oscillations between min and max.
            ideal_batch_size = int(
                old_batch_size * self.MIN_IDEAL_BATCH_DURATION / batch_duration
            )
            batch_size = max(2 * ideal_batch_size, 1)
            self._effective_batch_size = batch_size
            if self.parallel.verbose >= 10:
                self.parallel._print(
                    f"Batch computation too slow ({batch_duration}s.) "
                    f"Setting batch_size={batch_size}."
                )
        else:
            # Duration inside the ideal window (or no measurement yet):
            # keep the current size.
            batch_size = old_batch_size

        if batch_size != old_batch_size:
            # The smoothed duration estimate (updated in batch_completed) is
            # only meaningful while the batch size stays constant, so restart
            # it whenever the size is re-tuned.
            self._smoothed_batch_duration = \
                self._DEFAULT_SMOOTHED_BATCH_DURATION

        return batch_size

    def batch_completed(self, batch_size, duration):
        """Callback indicate how long it took to run a batch"""
        if batch_size != self._effective_batch_size:
            # Measurement taken for a stale batch size: ignore it.
            return
        # Exponentially weighted moving average of the dispatch-to-completion
        # duration, restarted from scratch after each size re-tune.
        old_duration = self._smoothed_batch_duration
        if old_duration == self._DEFAULT_SMOOTHED_BATCH_DURATION:
            # First record of duration for this batch size after the last
            # reset.
            self._smoothed_batch_duration = duration
        else:
            self._smoothed_batch_duration = 0.8 * old_duration + 0.2 * duration

    def reset_batch_stats(self):
        """Reset batch statistics to default values.

        This avoids interferences with future jobs.
        """
        self._effective_batch_size = self._DEFAULT_EFFECTIVE_BATCH_SIZE
        self._smoothed_batch_duration = self._DEFAULT_SMOOTHED_BATCH_DURATION
|
| 399 |
+
|
| 400 |
+
|
| 401 |
+
class ThreadingBackend(PoolManagerMixin, ParallelBackendBase):
    """A ParallelBackend which will use a thread pool to execute batches in.

    This is a low-overhead backend but it suffers from the Python Global
    Interpreter Lock if the called function relies a lot on Python objects.
    Mostly useful when the execution bottleneck is a compiled extension that
    explicitly releases the GIL (for instance a Cython loop wrapped in a "with
    nogil" block or an expensive call to a library such as NumPy).

    The actual thread pool is lazily initialized: the actual thread pool
    construction is delayed to the first call to apply_async.

    ThreadingBackend is used as the default backend for nested calls.
    """

    supports_retrieve_callback = True
    uses_threads = True
    supports_sharedmem = True

    def configure(self, n_jobs=1, parallel=None, **backend_args):
        """Build a process or thread pool and return the number of workers"""
        n_jobs = self.effective_n_jobs(n_jobs)
        if n_jobs == 1:
            # A single worker would only add thread-pool overhead: let the
            # sequential backend run the tasks inline instead.
            raise FallbackToBackend(
                SequentialBackend(nesting_level=self.nesting_level))
        self._n_jobs = n_jobs
        self.parallel = parallel
        return n_jobs

    def _get_pool(self):
        """Lazily initialize the thread pool

        The actual pool of worker threads is only initialized at the first
        call to apply_async.
        """
        pool = self._pool
        if pool is None:
            pool = self._pool = ThreadPool(self._n_jobs)
        return pool
|
| 440 |
+
|
| 441 |
+
|
| 442 |
+
class MultiprocessingBackend(PoolManagerMixin, AutoBatchingMixin,
                             ParallelBackendBase):
    """A ParallelBackend which will use a multiprocessing.Pool.

    Will introduce some communication and memory overhead when exchanging
    input and output data with the worker Python processes.
    However, does not suffer from the Python Global Interpreter Lock.
    """

    supports_retrieve_callback = True
    supports_return_generator = False

    def effective_n_jobs(self, n_jobs):
        """Determine the number of jobs which are going to run in parallel.

        This also checks if we are attempting to create a nested parallel
        loop.
        """
        if mp is None:
            # multiprocessing is unavailable on this platform.
            return 1

        if mp.current_process().daemon:
            # Daemonic processes cannot have children
            if n_jobs != 1:
                if inside_dask_worker():
                    # Fix: added the missing "\n" after the dask.config.set
                    # bullet so the two work-arounds are printed on separate
                    # lines, matching the identical message in LokyBackend.
                    msg = (
                        "Inside a Dask worker with daemon=True, "
                        "setting n_jobs=1.\nPossible work-arounds:\n"
                        "- dask.config.set("
                        "{'distributed.worker.daemon': False})\n"
                        "- set the environment variable "
                        "DASK_DISTRIBUTED__WORKER__DAEMON=False\n"
                        "before creating your Dask cluster."
                    )
                else:
                    msg = (
                        'Multiprocessing-backed parallel loops '
                        'cannot be nested, setting n_jobs=1'
                    )
                warnings.warn(msg, stacklevel=3)
            return 1

        if process_executor._CURRENT_DEPTH > 0:
            # Mixing loky and multiprocessing in nested loop is not supported
            if n_jobs != 1:
                warnings.warn(
                    'Multiprocessing-backed parallel loops cannot be nested,'
                    ' below loky, setting n_jobs=1',
                    stacklevel=3)
            return 1

        elif not (self.in_main_thread() or self.nesting_level == 0):
            # Prevent posix fork inside in non-main posix threads
            if n_jobs != 1:
                warnings.warn(
                    'Multiprocessing-backed parallel loops cannot be nested'
                    ' below threads, setting n_jobs=1',
                    stacklevel=3)
            return 1

        return super(MultiprocessingBackend, self).effective_n_jobs(n_jobs)

    def configure(self, n_jobs=1, parallel=None, prefer=None, require=None,
                  **memmappingpool_args):
        """Build a process or thread pool and return the number of workers"""
        n_jobs = self.effective_n_jobs(n_jobs)
        if n_jobs == 1:
            raise FallbackToBackend(
                SequentialBackend(nesting_level=self.nesting_level))

        # Make sure to free as much memory as possible before forking
        gc.collect()
        self._pool = MemmappingPool(n_jobs, **memmappingpool_args)
        self.parallel = parallel
        return n_jobs

    def terminate(self):
        """Shutdown the process or thread pool"""
        super(MultiprocessingBackend, self).terminate()
        # Also clear the auto-batching counters so the next Parallel call
        # starts its tuning from scratch.
        self.reset_batch_stats()
|
| 522 |
+
|
| 523 |
+
|
| 524 |
+
class LokyBackend(AutoBatchingMixin, ParallelBackendBase):
    """Managing pool of workers with loky instead of multiprocessing."""

    supports_retrieve_callback = True
    supports_inner_max_num_threads = True

    def configure(self, n_jobs=1, parallel=None, prefer=None, require=None,
                  idle_worker_timeout=300, **memmappingexecutor_args):
        """Build a process executor and return the number of workers"""
        n_jobs = self.effective_n_jobs(n_jobs)
        if n_jobs == 1:
            # One worker only: the sequential backend is cheaper.
            raise FallbackToBackend(
                SequentialBackend(nesting_level=self.nesting_level))

        self._workers = get_memmapping_executor(
            n_jobs, timeout=idle_worker_timeout,
            env=self._prepare_worker_env(n_jobs=n_jobs),
            context_id=parallel._id, **memmappingexecutor_args)
        self.parallel = parallel
        return n_jobs

    def effective_n_jobs(self, n_jobs):
        """Determine the number of jobs which are going to run in parallel"""
        if n_jobs == 0:
            raise ValueError('n_jobs == 0 in Parallel has no meaning')

        if mp is None or n_jobs is None:
            # multiprocessing is not available or disabled, fallback
            # to sequential mode
            return 1

        if mp.current_process().daemon:
            # Daemonic processes cannot have children
            if n_jobs != 1:
                if inside_dask_worker():
                    msg = (
                        "Inside a Dask worker with daemon=True, "
                        "setting n_jobs=1.\nPossible work-arounds:\n"
                        "- dask.config.set("
                        "{'distributed.worker.daemon': False})\n"
                        "- set the environment variable "
                        "DASK_DISTRIBUTED__WORKER__DAEMON=False\n"
                        "before creating your Dask cluster."
                    )
                else:
                    msg = (
                        'Loky-backed parallel loops cannot be called in a'
                        ' multiprocessing, setting n_jobs=1'
                    )
                warnings.warn(msg, stacklevel=3)
            return 1

        if not (self.in_main_thread() or self.nesting_level == 0):
            # Prevent posix fork inside in non-main posix threads
            if n_jobs != 1:
                warnings.warn(
                    'Loky-backed parallel loops cannot be nested below '
                    'threads, setting n_jobs=1',
                    stacklevel=3)
            return 1

        if n_jobs < 0:
            # Negative values count down from the CPU count (-1 == all CPUs).
            n_jobs = max(cpu_count() + 1 + n_jobs, 1)
        return n_jobs

    def apply_async(self, func, callback=None):
        """Schedule a func to be run"""
        future = self._workers.submit(func)
        if callback is not None:
            future.add_done_callback(callback)
        return future

    def retrieve_result_callback(self, out):
        """Return the result of a completed future, translating shutdowns."""
        try:
            return out.result()
        except ShutdownExecutorError:
            raise RuntimeError(
                "The executor underlying Parallel has been shutdown. "
                "This is likely due to the garbage collection of a previous "
                "generator from a call to Parallel with return_as='generator'."
                " Make sure the generator is not garbage collected when "
                "submitting a new job or that it is first properly exhausted."
            )

    def terminate(self):
        if self._workers is not None:
            # Don't terminate the workers as we want to reuse them in later
            # calls, but cleanup the temporary resources that the Parallel
            # call created. This 'hack' requires a private, low-level
            # operation.
            self._workers._temp_folder_manager._clean_temporary_resources(
                context_id=self.parallel._id, force=False
            )
            self._workers = None

        self.reset_batch_stats()

    def abort_everything(self, ensure_ready=True):
        """Shutdown the workers and restart a new one with the same parameters
        """
        self._workers.terminate(kill_workers=True)
        self._workers = None

        if ensure_ready:
            self.configure(n_jobs=self.parallel.n_jobs, parallel=self.parallel)
|
| 625 |
+
|
| 626 |
+
|
| 627 |
+
class FallbackToBackend(Exception):
    """Raised when configuration should fallback to another backend"""

    def __init__(self, backend):
        # The already-constructed backend instance the caller should use
        # instead of the one whose configure() raised.
        self.backend = backend
|
| 632 |
+
|
| 633 |
+
|
| 634 |
+
def inside_dask_worker():
    """Check whether the current function is executed inside a Dask worker.
    """
    # This function can not be in joblib._dask because there would be a
    # circular import:
    # _dask imports _parallel_backend that imports _dask ...
    try:
        from distributed import get_worker
    except ImportError:
        # distributed is not installed: certainly not in a Dask worker.
        return False

    # get_worker() raises ValueError when called outside a worker thread.
    try:
        get_worker()
    except ValueError:
        return False
    return True
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/_store_backends.py
ADDED
|
@@ -0,0 +1,474 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Storage providers backends for Memory caching."""
|
| 2 |
+
|
| 3 |
+
from pickle import PicklingError
|
| 4 |
+
import re
|
| 5 |
+
import os
|
| 6 |
+
import os.path
|
| 7 |
+
import datetime
|
| 8 |
+
import json
|
| 9 |
+
import shutil
|
| 10 |
+
import time
|
| 11 |
+
import warnings
|
| 12 |
+
import collections
|
| 13 |
+
import operator
|
| 14 |
+
import threading
|
| 15 |
+
from abc import ABCMeta, abstractmethod
|
| 16 |
+
|
| 17 |
+
from .backports import concurrency_safe_rename
|
| 18 |
+
from .disk import mkdirp, memstr_to_bytes, rm_subdirs
|
| 19 |
+
from .logger import format_time
|
| 20 |
+
from . import numpy_pickle
|
| 21 |
+
|
| 22 |
+
# Lightweight record describing one cached item on disk: its filesystem
# path, its size in bytes, and its last-access time (used by the store
# eviction logic to delete least-recently-used items first).
CacheItemInfo = collections.namedtuple('CacheItemInfo',
                                       'path size last_access')
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class CacheWarning(Warning):
    """Warning to capture dump failures except for PicklingError."""
    # Dedicated category so callers can filter best-effort cache-write
    # failures separately from other warnings.
    pass
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def concurrency_safe_write(object_to_write, filename, write_func):
    """Writes an object into a unique file in a concurrency-safe way."""
    # Tag the temporary file with both the current thread id and the pid so
    # concurrent writers never target the same file; the caller is expected
    # to atomically rename the returned file into place afterwards.
    temporary_filename = '{}.thread-{}-pid-{}'.format(
        filename, id(threading.current_thread()), os.getpid())
    write_func(object_to_write, temporary_filename)
    return temporary_filename
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class StoreBackendBase(metaclass=ABCMeta):
    """Helper Abstract Base Class which defines all methods that
    a StorageBackend must implement."""

    # Base location of the store (a directory path on a filesystem backend);
    # expected to be set by configure().
    location = None

    @abstractmethod
    def _open_item(self, f, mode):
        """Open an item in the store and return a file-like object.

        Private hook, only used by StoreBackendMixin.

        Parameters
        ----------
        f: a file-like object
            The file-like object where an item is stored and retrieved
        mode: string, optional
            The mode used to open the file-like object; allowed values are
            'rb' and 'wb'.

        Returns
        -------
        a file-like object
        """

    @abstractmethod
    def _item_exists(self, location):
        """Tell whether an item location exists in the store.

        Private hook, only used by StoreBackendMixin.

        Parameters
        ----------
        location: string
            The location of an item. On a filesystem, this corresponds to the
            absolute path, including the filename, of a file.

        Returns
        -------
        True if the item exists, False otherwise
        """

    @abstractmethod
    def _move_item(self, src, dst):
        """Move an item from src to dst within the store.

        Private hook, only used by StoreBackendMixin.

        Parameters
        ----------
        src: string
            The source location of an item
        dst: string
            The destination location of an item
        """

    @abstractmethod
    def create_location(self, location):
        """Create a location in the store.

        Parameters
        ----------
        location: string
            The location in the store. On a filesystem, this corresponds to a
            directory.
        """

    @abstractmethod
    def clear_location(self, location):
        """Clear a location in the store.

        Parameters
        ----------
        location: string
            The location in the store. On a filesystem, this corresponds to a
            directory or a filename absolute path
        """

    @abstractmethod
    def get_items(self):
        """Return the whole list of items available in the store.

        Returns
        -------
        The list of items identified by their ids (e.g filename in a
        filesystem).
        """

    @abstractmethod
    def configure(self, location, verbose=0, backend_options=dict()):
        """Configure the store.

        Parameters
        ----------
        location: string
            The base location used by the store. On a filesystem, this
            corresponds to a directory.
        verbose: int
            The level of verbosity of the store
        backend_options: dict
            Contains a dictionary of named parameters used to configure the
            store backend.
        """
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
class StoreBackendMixin(object):
|
| 147 |
+
"""Class providing all logic for managing the store in a generic way.
|
| 148 |
+
|
| 149 |
+
The StoreBackend subclass has to implement 3 methods: create_location,
|
| 150 |
+
clear_location and configure. The StoreBackend also has to provide
|
| 151 |
+
a private _open_item, _item_exists and _move_item methods. The _open_item
|
| 152 |
+
method has to have the same signature as the builtin open and return a
|
| 153 |
+
file-like object.
|
| 154 |
+
"""
|
| 155 |
+
|
| 156 |
+
def load_item(self, call_id, verbose=1, timestamp=None, metadata=None):
|
| 157 |
+
"""Load an item from the store given its id as a list of str."""
|
| 158 |
+
full_path = os.path.join(self.location, *call_id)
|
| 159 |
+
|
| 160 |
+
if verbose > 1:
|
| 161 |
+
ts_string = ('{: <16}'.format(format_time(time.time() - timestamp))
|
| 162 |
+
if timestamp is not None else '')
|
| 163 |
+
signature = os.path.basename(call_id[0])
|
| 164 |
+
if metadata is not None and 'input_args' in metadata:
|
| 165 |
+
kwargs = ', '.join('{}={}'.format(*item)
|
| 166 |
+
for item in metadata['input_args'].items())
|
| 167 |
+
signature += '({})'.format(kwargs)
|
| 168 |
+
msg = '[Memory]{}: Loading {}'.format(ts_string, signature)
|
| 169 |
+
if verbose < 10:
|
| 170 |
+
print('{0}...'.format(msg))
|
| 171 |
+
else:
|
| 172 |
+
print('{0} from {1}'.format(msg, full_path))
|
| 173 |
+
|
| 174 |
+
mmap_mode = (None if not hasattr(self, 'mmap_mode')
|
| 175 |
+
else self.mmap_mode)
|
| 176 |
+
|
| 177 |
+
filename = os.path.join(full_path, 'output.pkl')
|
| 178 |
+
if not self._item_exists(filename):
|
| 179 |
+
raise KeyError("Non-existing item (may have been "
|
| 180 |
+
"cleared).\nFile %s does not exist" % filename)
|
| 181 |
+
|
| 182 |
+
# file-like object cannot be used when mmap_mode is set
|
| 183 |
+
if mmap_mode is None:
|
| 184 |
+
with self._open_item(filename, "rb") as f:
|
| 185 |
+
item = numpy_pickle.load(f)
|
| 186 |
+
else:
|
| 187 |
+
item = numpy_pickle.load(filename, mmap_mode=mmap_mode)
|
| 188 |
+
return item
|
| 189 |
+
|
| 190 |
+
def dump_item(self, call_id, item, verbose=1):
|
| 191 |
+
"""Dump an item in the store at the id given as a list of str."""
|
| 192 |
+
try:
|
| 193 |
+
item_path = os.path.join(self.location, *call_id)
|
| 194 |
+
if not self._item_exists(item_path):
|
| 195 |
+
self.create_location(item_path)
|
| 196 |
+
filename = os.path.join(item_path, 'output.pkl')
|
| 197 |
+
if verbose > 10:
|
| 198 |
+
print('Persisting in %s' % item_path)
|
| 199 |
+
|
| 200 |
+
def write_func(to_write, dest_filename):
|
| 201 |
+
with self._open_item(dest_filename, "wb") as f:
|
| 202 |
+
try:
|
| 203 |
+
numpy_pickle.dump(to_write, f, compress=self.compress)
|
| 204 |
+
except PicklingError as e:
|
| 205 |
+
# TODO(1.5) turn into error
|
| 206 |
+
warnings.warn(
|
| 207 |
+
"Unable to cache to disk: failed to pickle "
|
| 208 |
+
"output. In version 1.5 this will raise an "
|
| 209 |
+
f"exception. Exception: {e}.",
|
| 210 |
+
FutureWarning
|
| 211 |
+
)
|
| 212 |
+
|
| 213 |
+
self._concurrency_safe_write(item, filename, write_func)
|
| 214 |
+
except Exception as e: # noqa: E722
|
| 215 |
+
warnings.warn(
|
| 216 |
+
"Unable to cache to disk. Possibly a race condition in the "
|
| 217 |
+
f"creation of the directory. Exception: {e}.",
|
| 218 |
+
CacheWarning
|
| 219 |
+
)
|
| 220 |
+
|
| 221 |
+
def clear_item(self, call_id):
|
| 222 |
+
"""Clear the item at the id, given as a list of str."""
|
| 223 |
+
item_path = os.path.join(self.location, *call_id)
|
| 224 |
+
if self._item_exists(item_path):
|
| 225 |
+
self.clear_location(item_path)
|
| 226 |
+
|
| 227 |
+
def contains_item(self, call_id):
|
| 228 |
+
"""Check if there is an item at the id, given as a list of str."""
|
| 229 |
+
item_path = os.path.join(self.location, *call_id)
|
| 230 |
+
filename = os.path.join(item_path, 'output.pkl')
|
| 231 |
+
|
| 232 |
+
return self._item_exists(filename)
|
| 233 |
+
|
| 234 |
+
def get_item_info(self, call_id):
|
| 235 |
+
"""Return information about item."""
|
| 236 |
+
return {'location': os.path.join(self.location, *call_id)}
|
| 237 |
+
|
| 238 |
+
def get_metadata(self, call_id):
|
| 239 |
+
"""Return actual metadata of an item."""
|
| 240 |
+
try:
|
| 241 |
+
item_path = os.path.join(self.location, *call_id)
|
| 242 |
+
filename = os.path.join(item_path, 'metadata.json')
|
| 243 |
+
with self._open_item(filename, 'rb') as f:
|
| 244 |
+
return json.loads(f.read().decode('utf-8'))
|
| 245 |
+
except: # noqa: E722
|
| 246 |
+
return {}
|
| 247 |
+
|
| 248 |
+
def store_metadata(self, call_id, metadata):
|
| 249 |
+
"""Store metadata of a computation."""
|
| 250 |
+
try:
|
| 251 |
+
item_path = os.path.join(self.location, *call_id)
|
| 252 |
+
self.create_location(item_path)
|
| 253 |
+
filename = os.path.join(item_path, 'metadata.json')
|
| 254 |
+
|
| 255 |
+
def write_func(to_write, dest_filename):
|
| 256 |
+
with self._open_item(dest_filename, "wb") as f:
|
| 257 |
+
f.write(json.dumps(to_write).encode('utf-8'))
|
| 258 |
+
|
| 259 |
+
self._concurrency_safe_write(metadata, filename, write_func)
|
| 260 |
+
except: # noqa: E722
|
| 261 |
+
pass
|
| 262 |
+
|
| 263 |
+
def contains_path(self, call_id):
|
| 264 |
+
"""Check cached function is available in store."""
|
| 265 |
+
func_path = os.path.join(self.location, *call_id)
|
| 266 |
+
return self.object_exists(func_path)
|
| 267 |
+
|
| 268 |
+
def clear_path(self, call_id):
|
| 269 |
+
"""Clear all items with a common path in the store."""
|
| 270 |
+
func_path = os.path.join(self.location, *call_id)
|
| 271 |
+
if self._item_exists(func_path):
|
| 272 |
+
self.clear_location(func_path)
|
| 273 |
+
|
| 274 |
+
def store_cached_func_code(self, call_id, func_code=None):
|
| 275 |
+
"""Store the code of the cached function."""
|
| 276 |
+
func_path = os.path.join(self.location, *call_id)
|
| 277 |
+
if not self._item_exists(func_path):
|
| 278 |
+
self.create_location(func_path)
|
| 279 |
+
|
| 280 |
+
if func_code is not None:
|
| 281 |
+
filename = os.path.join(func_path, "func_code.py")
|
| 282 |
+
with self._open_item(filename, 'wb') as f:
|
| 283 |
+
f.write(func_code.encode('utf-8'))
|
| 284 |
+
|
| 285 |
+
def get_cached_func_code(self, call_id):
|
| 286 |
+
"""Store the code of the cached function."""
|
| 287 |
+
filename = os.path.join(self.location, *call_id, 'func_code.py')
|
| 288 |
+
try:
|
| 289 |
+
with self._open_item(filename, 'rb') as f:
|
| 290 |
+
return f.read().decode('utf-8')
|
| 291 |
+
except: # noqa: E722
|
| 292 |
+
raise
|
| 293 |
+
|
| 294 |
+
def get_cached_func_info(self, call_id):
|
| 295 |
+
"""Return information related to the cached function if it exists."""
|
| 296 |
+
return {'location': os.path.join(self.location, *call_id)}
|
| 297 |
+
|
| 298 |
+
def clear(self):
|
| 299 |
+
"""Clear the whole store content."""
|
| 300 |
+
self.clear_location(self.location)
|
| 301 |
+
|
| 302 |
+
def enforce_store_limits(
|
| 303 |
+
self, bytes_limit, items_limit=None, age_limit=None
|
| 304 |
+
):
|
| 305 |
+
"""
|
| 306 |
+
Remove the store's oldest files to enforce item, byte, and age limits.
|
| 307 |
+
"""
|
| 308 |
+
items_to_delete = self._get_items_to_delete(
|
| 309 |
+
bytes_limit, items_limit, age_limit
|
| 310 |
+
)
|
| 311 |
+
|
| 312 |
+
for item in items_to_delete:
|
| 313 |
+
if self.verbose > 10:
|
| 314 |
+
print('Deleting item {0}'.format(item))
|
| 315 |
+
try:
|
| 316 |
+
self.clear_location(item.path)
|
| 317 |
+
except OSError:
|
| 318 |
+
# Even with ignore_errors=True shutil.rmtree can raise OSError
|
| 319 |
+
# with:
|
| 320 |
+
# [Errno 116] Stale file handle if another process has deleted
|
| 321 |
+
# the folder already.
|
| 322 |
+
pass
|
| 323 |
+
|
| 324 |
+
def _get_items_to_delete(
|
| 325 |
+
self, bytes_limit, items_limit=None, age_limit=None
|
| 326 |
+
):
|
| 327 |
+
"""
|
| 328 |
+
Get items to delete to keep the store under size, file, & age limits.
|
| 329 |
+
"""
|
| 330 |
+
if isinstance(bytes_limit, str):
|
| 331 |
+
bytes_limit = memstr_to_bytes(bytes_limit)
|
| 332 |
+
|
| 333 |
+
items = self.get_items()
|
| 334 |
+
if not items:
|
| 335 |
+
return []
|
| 336 |
+
|
| 337 |
+
size = sum(item.size for item in items)
|
| 338 |
+
|
| 339 |
+
if bytes_limit is not None:
|
| 340 |
+
to_delete_size = size - bytes_limit
|
| 341 |
+
else:
|
| 342 |
+
to_delete_size = 0
|
| 343 |
+
|
| 344 |
+
if items_limit is not None:
|
| 345 |
+
to_delete_items = len(items) - items_limit
|
| 346 |
+
else:
|
| 347 |
+
to_delete_items = 0
|
| 348 |
+
|
| 349 |
+
if age_limit is not None:
|
| 350 |
+
older_item = min(item.last_access for item in items)
|
| 351 |
+
deadline = datetime.datetime.now() - age_limit
|
| 352 |
+
else:
|
| 353 |
+
deadline = None
|
| 354 |
+
|
| 355 |
+
if (
|
| 356 |
+
to_delete_size <= 0 and to_delete_items <= 0
|
| 357 |
+
and (deadline is None or older_item > deadline)
|
| 358 |
+
):
|
| 359 |
+
return []
|
| 360 |
+
|
| 361 |
+
# We want to delete first the cache items that were accessed a
|
| 362 |
+
# long time ago
|
| 363 |
+
items.sort(key=operator.attrgetter('last_access'))
|
| 364 |
+
|
| 365 |
+
items_to_delete = []
|
| 366 |
+
size_so_far = 0
|
| 367 |
+
items_so_far = 0
|
| 368 |
+
|
| 369 |
+
for item in items:
|
| 370 |
+
if (
|
| 371 |
+
(size_so_far >= to_delete_size)
|
| 372 |
+
and items_so_far >= to_delete_items
|
| 373 |
+
and (deadline is None or deadline < item.last_access)
|
| 374 |
+
):
|
| 375 |
+
break
|
| 376 |
+
|
| 377 |
+
items_to_delete.append(item)
|
| 378 |
+
size_so_far += item.size
|
| 379 |
+
items_so_far += 1
|
| 380 |
+
|
| 381 |
+
return items_to_delete
|
| 382 |
+
|
| 383 |
+
def _concurrency_safe_write(self, to_write, filename, write_func):
    """Writes an object into a file in a concurrency-safe way.

    The payload is first written to a temporary file, then moved into
    place with ``self._move_item`` so concurrent readers never observe
    a partially written item.
    """
    temporary_filename = concurrency_safe_write(to_write,
                                                filename, write_func)
    self._move_item(temporary_filename, filename)
|
| 388 |
+
|
| 389 |
+
def __repr__(self):
    """Printable representation of the store location."""
    # e.g. FileSystemStoreBackend(location="/path/to/cache")
    return '{class_name}(location="{location}")'.format(
        class_name=self.__class__.__name__, location=self.location)
|
| 393 |
+
|
| 394 |
+
|
| 395 |
+
class FileSystemStoreBackend(StoreBackendBase, StoreBackendMixin):
    """A StoreBackend used with local or network file systems."""

    # Plain filesystem primitives used by the generic store machinery.
    _open_item = staticmethod(open)
    _item_exists = staticmethod(os.path.exists)
    _move_item = staticmethod(concurrency_safe_rename)

    def clear_location(self, location):
        """Delete location on store."""
        # Clearing the store root only empties it; clearing any other
        # location removes the directory itself (errors ignored).
        if (location == self.location):
            rm_subdirs(location)
        else:
            shutil.rmtree(location, ignore_errors=True)

    def create_location(self, location):
        """Create object location on store"""
        mkdirp(location)

    def get_items(self):
        """Returns the whole list of items available in the store."""
        items = []

        for dirpath, _, filenames in os.walk(self.location):
            # Cache entries live in directories whose basename starts with
            # a 32-hex-digit hash (re.match anchors at the beginning).
            is_cache_hash_dir = re.match('[a-f0-9]{32}',
                                         os.path.basename(dirpath))

            if is_cache_hash_dir:
                output_filename = os.path.join(dirpath, 'output.pkl')
                try:
                    last_access = os.path.getatime(output_filename)
                except OSError:
                    # Fall back to the directory's atime when output.pkl
                    # is missing (e.g. partially written entry).
                    try:
                        last_access = os.path.getatime(dirpath)
                    except OSError:
                        # The directory has already been deleted
                        continue

                last_access = datetime.datetime.fromtimestamp(last_access)
                try:
                    full_filenames = [os.path.join(dirpath, fn)
                                      for fn in filenames]
                    dirsize = sum(os.path.getsize(fn)
                                  for fn in full_filenames)
                except OSError:
                    # Either output_filename or one of the files in
                    # dirpath does not exist any more. We assume this
                    # directory is being cleaned by another process already
                    continue

                items.append(CacheItemInfo(dirpath, dirsize,
                                           last_access))

        return items

    def configure(self, location, verbose=1, backend_options=None):
        """Configure the store backend.

        For this backend, valid store options are 'compress' and 'mmap_mode'
        """
        if backend_options is None:
            backend_options = {}

        # setup location directory
        self.location = location
        if not os.path.exists(self.location):
            mkdirp(self.location)

        # item can be stored compressed for faster I/O
        self.compress = backend_options.get('compress', False)

        # FileSystemStoreBackend can be used with mmap_mode options under
        # certain conditions.
        mmap_mode = backend_options.get('mmap_mode')
        if self.compress and mmap_mode is not None:
            warnings.warn('Compressed items cannot be memmapped in a '
                          'filesystem store. Option will be ignored.',
                          stacklevel=2)

        self.mmap_mode = mmap_mode
        self.verbose = verbose
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/_utils.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Adapted from https://stackoverflow.com/a/9558001/2536294
|
| 2 |
+
|
| 3 |
+
import ast
|
| 4 |
+
from dataclasses import dataclass
|
| 5 |
+
import operator as op
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
from ._multiprocessing_helpers import mp
|
| 9 |
+
|
| 10 |
+
if mp is not None:
|
| 11 |
+
from .externals.loky.process_executor import _ExceptionWithTraceback
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# Mapping from AST operator node types to the callables implementing them.
# Only this closed set of arithmetic operations is supported.
operators = {
    ast.Add: op.add,
    ast.Sub: op.sub,
    ast.Mult: op.mul,
    ast.Div: op.truediv,
    ast.FloorDiv: op.floordiv,
    ast.Mod: op.mod,
    ast.Pow: op.pow,
    ast.USub: op.neg,
}


def eval_expr(expr):
    """Safely evaluate a simple arithmetic expression string.

    >>> eval_expr('2*6')
    12
    >>> eval_expr('2**6')
    64
    >>> eval_expr('1 + 2*3**(4) / (6 + -7)')
    -161.0
    """
    try:
        parsed = ast.parse(expr, mode="eval")
        return eval_(parsed.body)
    except (TypeError, SyntaxError, KeyError) as e:
        # Anything outside the supported grammar (calls, names, unknown
        # operators, malformed input) is reported uniformly.
        raise ValueError(
            f"{expr!r} is not a valid or supported arithmetic expression."
        ) from e


def eval_(node):
    """Recursively evaluate an arithmetic AST node via the operator table."""
    if isinstance(node, ast.Constant):  # a literal number
        return node.value
    if isinstance(node, ast.BinOp):  # <left> <operator> <right>
        apply = operators[type(node.op)]
        return apply(eval_(node.left), eval_(node.right))
    if isinstance(node, ast.UnaryOp):  # <operator> <operand> e.g., -1
        return operators[type(node.op)](eval_(node.operand))
    # Any other node type is unsupported.
    raise TypeError(node)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
@dataclass(frozen=True)
class _Sentinel:
    """A sentinel to mark a parameter as not explicitly set"""

    # The value the parameter falls back to when left unset.
    default_value: object

    def __repr__(self):
        # Renders as e.g. default(None), keeping signatures readable in docs.
        return f"default({self.default_value!r})"
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class _TracebackCapturingWrapper:
    """Protect function call and return error with traceback."""

    def __init__(self, func):
        # The wrapped callable, invoked with keyword arguments only.
        self.func = func

    def __call__(self, **kwargs):
        try:
            return self.func(**kwargs)
        except BaseException as e:
            # Return (rather than raise) the exception wrapped with its
            # traceback so it can cross a process boundary and be re-raised
            # on the caller's side.
            return _ExceptionWithTraceback(e)
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def _retrieve_traceback_capturing_wrapped_call(out):
    """Unwrap a result from ``_TracebackCapturingWrapper``.

    If the call failed remotely, rebuild the original exception (with its
    captured traceback) and re-raise it here; otherwise return the result.
    """
    if isinstance(out, _ExceptionWithTraceback):
        # __reduce__ yields the rebuild helper and its arguments.
        rebuild, args = out.__reduce__()
        out = rebuild(*args)
    if isinstance(out, BaseException):
        raise out
    return out
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/backports.py
ADDED
|
@@ -0,0 +1,177 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Backports of fixes for joblib dependencies
|
| 3 |
+
"""
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
import time
|
| 7 |
+
|
| 8 |
+
from os.path import basename
|
| 9 |
+
from multiprocessing import util
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class Version:
    """Backport from deprecated distutils

    We maintain this backport to avoid introducing a new dependency on
    `packaging`.

    We might rexplore this choice in the future if all major Python projects
    introduce a dependency on packaging anyway.
    """

    def __init__(self, vstring=None):
        # Parsing is delegated to the subclass' ``parse`` implementation.
        if vstring:
            self.parse(vstring)

    def __repr__(self):
        return "%s ('%s')" % (self.__class__.__name__, str(self))

    # Each rich comparison delegates to the subclass-provided ``_cmp``,
    # which returns -1/0/1 or NotImplemented for foreign types.

    def __eq__(self, other):
        outcome = self._cmp(other)
        if outcome is NotImplemented:
            return outcome
        return outcome == 0

    def __lt__(self, other):
        outcome = self._cmp(other)
        if outcome is NotImplemented:
            return outcome
        return outcome < 0

    def __le__(self, other):
        outcome = self._cmp(other)
        if outcome is NotImplemented:
            return outcome
        return outcome <= 0

    def __gt__(self, other):
        outcome = self._cmp(other)
        if outcome is NotImplemented:
            return outcome
        return outcome > 0

    def __ge__(self, other):
        outcome = self._cmp(other)
        if outcome is NotImplemented:
            return outcome
        return outcome >= 0
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class LooseVersion(Version):
|
| 61 |
+
"""Backport from deprecated distutils
|
| 62 |
+
|
| 63 |
+
We maintain this backport to avoid introducing a new dependency on
|
| 64 |
+
`packaging`.
|
| 65 |
+
|
| 66 |
+
We might rexplore this choice in the future if all major Python projects
|
| 67 |
+
introduce a dependency on packaging anyway.
|
| 68 |
+
"""
|
| 69 |
+
|
| 70 |
+
component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
|
| 71 |
+
|
| 72 |
+
def __init__(self, vstring=None):
|
| 73 |
+
if vstring:
|
| 74 |
+
self.parse(vstring)
|
| 75 |
+
|
| 76 |
+
def parse(self, vstring):
|
| 77 |
+
# I've given up on thinking I can reconstruct the version string
|
| 78 |
+
# from the parsed tuple -- so I just store the string here for
|
| 79 |
+
# use by __str__
|
| 80 |
+
self.vstring = vstring
|
| 81 |
+
components = [x for x in self.component_re.split(vstring)
|
| 82 |
+
if x and x != '.']
|
| 83 |
+
for i, obj in enumerate(components):
|
| 84 |
+
try:
|
| 85 |
+
components[i] = int(obj)
|
| 86 |
+
except ValueError:
|
| 87 |
+
pass
|
| 88 |
+
|
| 89 |
+
self.version = components
|
| 90 |
+
|
| 91 |
+
def __str__(self):
|
| 92 |
+
return self.vstring
|
| 93 |
+
|
| 94 |
+
def __repr__(self):
|
| 95 |
+
return "LooseVersion ('%s')" % str(self)
|
| 96 |
+
|
| 97 |
+
def _cmp(self, other):
|
| 98 |
+
if isinstance(other, str):
|
| 99 |
+
other = LooseVersion(other)
|
| 100 |
+
elif not isinstance(other, LooseVersion):
|
| 101 |
+
return NotImplemented
|
| 102 |
+
|
| 103 |
+
if self.version == other.version:
|
| 104 |
+
return 0
|
| 105 |
+
if self.version < other.version:
|
| 106 |
+
return -1
|
| 107 |
+
if self.version > other.version:
|
| 108 |
+
return 1
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
try:
    import numpy as np

    def make_memmap(filename, dtype='uint8', mode='r+', offset=0,
                    shape=None, order='C', unlink_on_gc_collect=False):
        """Custom memmap constructor compatible with numpy.memmap.

        This function:
        - is a backport the numpy memmap offset fix (See
          https://github.com/numpy/numpy/pull/8443 for more details.
          The numpy fix is available starting numpy 1.13)
        - adds ``unlink_on_gc_collect``, which specifies explicitly whether
          the process re-constructing the memmap owns a reference to the
          underlying file. If set to True, it adds a finalizer to the
          newly-created memmap that sends a maybe_unlink request for the
          memmaped file to resource_tracker.
        """
        util.debug(
            "[MEMMAP READ] creating a memmap (shape {}, filename {}, "
            "pid {})".format(shape, basename(filename), os.getpid())
        )

        mm = np.memmap(filename, dtype=dtype, mode=mode, offset=offset,
                       shape=shape, order=order)
        # Pre-1.13 numpy loses the requested offset on the instance;
        # restore it so downstream pickling keeps working.
        if LooseVersion(np.__version__) < '1.13':
            mm.offset = offset
        if unlink_on_gc_collect:
            # Deferred import: only needed (and importable) in the memmapping
            # machinery, avoids an import cycle at module load time.
            from ._memmapping_reducer import add_maybe_unlink_finalizer
            add_maybe_unlink_finalizer(mm)
        return mm
except ImportError:
    # numpy unavailable: keep the same signature but fail loudly on use.
    def make_memmap(filename, dtype='uint8', mode='r+', offset=0,
                    shape=None, order='C', unlink_on_gc_collect=False):
        raise NotImplementedError(
            "'joblib.backports.make_memmap' should not be used "
            'if numpy is not installed.')
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
if os.name == 'nt':
    # https://github.com/joblib/joblib/issues/540
    # Windows winerror codes treated as transient "access denied" failures.
    access_denied_errors = (5, 13)
    from os import replace

    def concurrency_safe_rename(src, dst):
        """Renames ``src`` into ``dst`` overwriting ``dst`` if it exists.

        On Windows os.replace can yield permission errors if executed by two
        different processes.
        """
        max_sleep_time = 1
        total_sleep_time = 0
        sleep_time = 0.001
        # Retry with exponential backoff (up to ~1s total) while another
        # process transiently holds the destination.
        while total_sleep_time < max_sleep_time:
            try:
                replace(src, dst)
                break
            except Exception as exc:
                if getattr(exc, 'winerror', None) in access_denied_errors:
                    time.sleep(sleep_time)
                    total_sleep_time += sleep_time
                    sleep_time *= 2
                else:
                    # Not an access-denied error: propagate immediately.
                    raise
        else:
            # while-else: retries exhausted without a successful rename.
            raise
else:
    # POSIX rename is already atomic and overwrite-capable.
    from os import replace as concurrency_safe_rename  # noqa
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/executor.py
ADDED
|
@@ -0,0 +1,117 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utility function to construct a loky.ReusableExecutor with custom pickler.
|
| 2 |
+
|
| 3 |
+
This module provides efficient ways of working with data stored in
|
| 4 |
+
shared memory with numpy.memmap arrays without inducing any memory
|
| 5 |
+
copy between the parent and child processes.
|
| 6 |
+
"""
|
| 7 |
+
# Author: Thomas Moreau <thomas.moreau.2010@gmail.com>
|
| 8 |
+
# Copyright: 2017, Thomas Moreau
|
| 9 |
+
# License: BSD 3 clause
|
| 10 |
+
|
| 11 |
+
from ._memmapping_reducer import get_memmapping_reducers
|
| 12 |
+
from ._memmapping_reducer import TemporaryResourcesManager
|
| 13 |
+
from .externals.loky.reusable_executor import _ReusablePoolExecutor
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# Module-level cache of the arguments used to create the last executor;
# lets get_memmapping_executor decide whether it can be reused.
_executor_args = None
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def get_memmapping_executor(n_jobs, **kwargs):
    """Module-level convenience wrapper around the class factory."""
    return MemmappingExecutor.get_memmapping_executor(n_jobs, **kwargs)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class MemmappingExecutor(_ReusablePoolExecutor):
    # Reusable loky executor whose job/result pickling goes through the
    # memmapping reducers, so large numpy arrays are shared via temp files.

    @classmethod
    def get_memmapping_executor(cls, n_jobs, timeout=300, initializer=None,
                                initargs=(), env=None, temp_folder=None,
                                context_id=None, **backend_args):
        """Factory for ReusableExecutor with automatic memmapping for large
        numpy arrays.
        """
        global _executor_args
        # Check if we can reuse the executor here instead of deferring the test
        # to loky as the reducers are objects that changes at each call.
        executor_args = backend_args.copy()
        executor_args.update(env if env else {})
        executor_args.update(dict(
            timeout=timeout, initializer=initializer, initargs=initargs))
        reuse = _executor_args is None or _executor_args == executor_args
        _executor_args = executor_args

        manager = TemporaryResourcesManager(temp_folder)

        # reducers access the temporary folder in which to store temporary
        # pickles through a call to manager.resolve_temp_folder_name. resolving
        # the folder name dynamically is useful to use different folders across
        # calls of a same reusable executor
        job_reducers, result_reducers = get_memmapping_reducers(
            unlink_on_gc_collect=True,
            temp_folder_resolver=manager.resolve_temp_folder_name,
            **backend_args)
        _executor, executor_is_reused = super().get_reusable_executor(
            n_jobs, job_reducers=job_reducers, result_reducers=result_reducers,
            reuse=reuse, timeout=timeout, initializer=initializer,
            initargs=initargs, env=env
        )

        if not executor_is_reused:
            # Only set a _temp_folder_manager for new executors. Reused
            # executors already have a _temporary_folder_manager that must not
            # be re-assigned like that because it is referenced in various
            # places in the reducing machinery of the executor.
            _executor._temp_folder_manager = manager

        if context_id is not None:
            # Only register the specified context once we know which manager
            # the current executor is using, in order to not register an atexit
            # finalizer twice for the same folder.
            _executor._temp_folder_manager.register_new_context(context_id)

        return _executor

    def terminate(self, kill_workers=False):
        """Shut the executor down and clean up its temporary resources."""
        self.shutdown(kill_workers=kill_workers)

        # When workers are killed in a brutal manner, they cannot execute the
        # finalizer of their shared memmaps. The refcount of those memmaps may
        # be off by an unknown number, so instead of decref'ing them, we force
        # delete the whole temporary folder, and unregister them. There is no
        # risk of PermissionError at folder deletion because at this
        # point, all child processes are dead, so all references to temporary
        # memmaps are closed. Otherwise, just try to delete as much as possible
        # with allow_non_empty=True but if we can't, it will be clean up later
        # on by the resource_tracker.
        with self._submit_resize_lock:
            self._temp_folder_manager._clean_temporary_resources(
                force=kill_workers, allow_non_empty=True
            )

    @property
    def _temp_folder(self):
        # Legacy property in tests. could be removed if we refactored the
        # memmapping tests. SHOULD ONLY BE USED IN TESTS!
        # We cache this property because it is called late in the tests - at
        # this point, all context have been unregistered, and
        # resolve_temp_folder_name raises an error.
        if getattr(self, '_cached_temp_folder', None) is not None:
            return self._cached_temp_folder
        else:
            self._cached_temp_folder = self._temp_folder_manager.resolve_temp_folder_name()  # noqa
            return self._cached_temp_folder
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
class _TestingMemmappingExecutor(MemmappingExecutor):
    """Wrapper around ReusableExecutor to ease memmapping testing with Pool
    and Executor. This is only for testing purposes.

    """
    def apply_async(self, func, args):
        """Schedule a func to be run"""
        future = self.submit(func, *args)
        # Give the Future a Pool-style ``get`` alias for the tests.
        future.get = future.result
        return future

    def map(self, f, *args):
        # Materialize the lazy Executor.map generator, like Pool.map does.
        return list(super().map(f, *args))
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/hashing.py
ADDED
|
@@ -0,0 +1,265 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Fast cryptographic hash of Python objects, with a special case for fast
|
| 3 |
+
hashing of numpy arrays.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
|
| 7 |
+
# Copyright (c) 2009 Gael Varoquaux
|
| 8 |
+
# License: BSD Style, 3 clauses.
|
| 9 |
+
|
| 10 |
+
import pickle
|
| 11 |
+
import hashlib
|
| 12 |
+
import sys
|
| 13 |
+
import types
|
| 14 |
+
import struct
|
| 15 |
+
import io
|
| 16 |
+
import decimal
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
# Use the pure-Python pickler (not the C accelerator) so its save/dispatch
# machinery can be subclassed and overridden by the hashers below.
Pickler = pickle._Pickler
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class _ConsistentSet(object):
    """ Class used to ensure the hash of Sets is preserved
        whatever the order of its items.
    """
    def __init__(self, set_sequence):
        # Forces order of elements in set to ensure consistent hash.
        try:
            # Trying first to order the set assuming the type of elements is
            # consistent and orderable.
            # This fails on python 3 when elements are unorderable
            # but we keep it in a try as it's faster.
            self._sequence = sorted(set_sequence)
        except (TypeError, decimal.InvalidOperation):
            # If elements are unorderable, sorting them using their hash.
            # This is slower but works in any case.
            self._sequence = sorted((hash(e) for e in set_sequence))
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class _MyHash(object):
    """ Class used to hash objects that won't normally pickle """

    def __init__(self, *args):
        # The components that uniquely identify the unpicklable object;
        # this tuple is what actually gets pickled/hashed in its place.
        self.args = args
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class Hasher(Pickler):
    """ A subclass of pickler, to do cryptographic hashing, rather than
        pickling.
    """

    def __init__(self, hash_name='md5'):
        # Pickle into an in-memory buffer; the digest is computed from it.
        self.stream = io.BytesIO()
        # By default we want a pickle protocol that only changes with
        # the major python version and not the minor one
        protocol = 3
        Pickler.__init__(self, self.stream, protocol=protocol)
        # Initialise the hash obj
        self._hash = hashlib.new(hash_name)

    def hash(self, obj, return_digest=True):
        """Pickle ``obj`` into the stream and fold it into the digest."""
        try:
            self.dump(obj)
        except pickle.PicklingError as e:
            # Augment the error with the object being hashed for debugging.
            e.args += ('PicklingError while hashing %r: %r' % (obj, e),)
            raise
        dumps = self.stream.getvalue()
        self._hash.update(dumps)
        if return_digest:
            return self._hash.hexdigest()

    def save(self, obj):
        if isinstance(obj, (types.MethodType, type({}.pop))):
            # the Pickler cannot pickle instance methods; here we decompose
            # them into components that make them uniquely identifiable
            if hasattr(obj, '__func__'):
                func_name = obj.__func__.__name__
            else:
                func_name = obj.__name__
            inst = obj.__self__
            if type(inst) is type(pickle):
                # Bound to a module: identify it by the module name.
                obj = _MyHash(func_name, inst.__name__)
            elif inst is None:
                # type(None) or type(module) do not pickle
                obj = _MyHash(func_name, inst)
            else:
                cls = obj.__self__.__class__
                obj = _MyHash(func_name, inst, cls)
        Pickler.save(self, obj)

    def memoize(self, obj):
        # We want hashing to be sensitive to value instead of reference.
        # For example we want ['aa', 'aa'] and ['aa', 'aaZ'[:2]]
        # to hash to the same value and that's why we disable memoization
        # for strings
        if isinstance(obj, (bytes, str)):
            return
        Pickler.memoize(self, obj)

    # The dispatch table of the pickler is not accessible in Python
    # 3, as these lines are only bugware for IPython, we skip them.
    def save_global(self, obj, name=None, pack=struct.pack):
        # We have to override this method in order to deal with objects
        # defined interactively in IPython that are not injected in
        # __main__
        kwargs = dict(name=name, pack=pack)
        del kwargs['pack']
        try:
            Pickler.save_global(self, obj, **kwargs)
        except pickle.PicklingError:
            Pickler.save_global(self, obj, **kwargs)
            module = getattr(obj, "__module__", None)
            if module == '__main__':
                my_name = name
                if my_name is None:
                    my_name = obj.__name__
                mod = sys.modules[module]
                if not hasattr(mod, my_name):
                    # IPython doesn't inject the variables define
                    # interactively in __main__
                    setattr(mod, my_name, obj)

    # Route builtins, types, classes and functions through save_global so
    # they hash by identity/name rather than failing to pickle.
    dispatch = Pickler.dispatch.copy()
    # builtin
    dispatch[type(len)] = save_global
    # type
    dispatch[type(object)] = save_global
    # classobj
    dispatch[type(Pickler)] = save_global
    # function
    dispatch[type(pickle.dump)] = save_global

    def _batch_setitems(self, items):
        # forces order of keys in dict to ensure consistent hash.
        try:
            # Trying first to compare dict assuming the type of keys is
            # consistent and orderable.
            # This fails on python 3 when keys are unorderable
            # but we keep it in a try as it's faster.
            Pickler._batch_setitems(self, iter(sorted(items)))
        except TypeError:
            # If keys are unorderable, sorting them using their hash. This is
            # slower but works in any case.
            Pickler._batch_setitems(self, iter(sorted((hash(k), v)
                                                      for k, v in items)))

    def save_set(self, set_items):
        # forces order of items in Set to ensure consistent hash
        Pickler.save(self, _ConsistentSet(set_items))

    dispatch[type(set())] = save_set
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
class NumpyHasher(Hasher):
    """ Special case the hasher for when numpy is loaded.
    """

    def __init__(self, hash_name='md5', coerce_mmap=False):
        """
        Parameters
        ----------
        hash_name: string
            The hash algorithm to be used
        coerce_mmap: boolean
            Make no difference between np.memmap and np.ndarray
            objects.
        """
        self.coerce_mmap = coerce_mmap
        Hasher.__init__(self, hash_name=hash_name)
        # delayed import of numpy, to avoid tight coupling
        import numpy as np
        self.np = np
        if hasattr(np, 'getbuffer'):
            self._getbuffer = np.getbuffer
        else:
            self._getbuffer = memoryview

    def save(self, obj):
        """ Subclass the save method, to hash ndarray subclass, rather
            than pickling them. Off course, this is a total abuse of
            the Pickler class.
        """
        if isinstance(obj, self.np.ndarray) and not obj.dtype.hasobject:
            # Compute a hash of the object
            # The update function of the hash requires a c_contiguous buffer.
            if obj.shape == ():
                # 0d arrays need to be flattened because viewing them as bytes
                # raises a ValueError exception.
                obj_c_contiguous = obj.flatten()
            elif obj.flags.c_contiguous:
                obj_c_contiguous = obj
            elif obj.flags.f_contiguous:
                # Transposing a Fortran-ordered array yields a C-ordered view
                # without copying.
                obj_c_contiguous = obj.T
            else:
                # Cater for non-single-segment arrays: this creates a
                # copy, and thus alleviates this issue.
                # XXX: There might be a more efficient way of doing this
                obj_c_contiguous = obj.flatten()

            # memoryview is not supported for some dtypes, e.g. datetime64, see
            # https://github.com/numpy/numpy/issues/4983. The
            # workaround is to view the array as bytes before
            # taking the memoryview.
            self._hash.update(
                self._getbuffer(obj_c_contiguous.view(self.np.uint8)))

            # We store the class, to be able to distinguish between
            # Objects with the same binary content, but different
            # classes.
            if self.coerce_mmap and isinstance(obj, self.np.memmap):
                # We don't make the difference between memmap and
                # normal ndarrays, to be able to reload previously
                # computed results with memmap.
                klass = self.np.ndarray
            else:
                klass = obj.__class__
            # We also return the dtype and the shape, to distinguish
            # different views on the same data with different dtypes.

            # The object will be pickled by the pickler hashed at the end.
            obj = (klass, ('HASHED', obj.dtype, obj.shape, obj.strides))
        elif isinstance(obj, self.np.dtype):
            # numpy.dtype consistent hashing is tricky to get right. This comes
            # from the fact that atomic np.dtype objects are interned:
            # ``np.dtype('f4') is np.dtype('f4')``. The situation is
            # complicated by the fact that this interning does not resist a
            # simple pickle.load/dump roundtrip:
            # ``pickle.loads(pickle.dumps(np.dtype('f4'))) is not
            # np.dtype('f4') Because pickle relies on memoization during
            # pickling, it is easy to
            # produce different hashes for seemingly identical objects, such as
            # ``[np.dtype('f4'), np.dtype('f4')]``
            # and ``[np.dtype('f4'), pickle.loads(pickle.dumps('f4'))]``.
            # To prevent memoization from interfering with hashing, we isolate
            # the serialization (and thus the pickle memoization) of each dtype
            # using each time a different ``pickle.dumps`` call unrelated to
            # the current Hasher instance.
            self._hash.update("_HASHED_DTYPE".encode('utf-8'))
            self._hash.update(pickle.dumps(obj))
            return
        Hasher.save(self, obj)
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
def hash(obj, hash_name='md5', coerce_mmap=False):
    """ Quick calculation of a hash to identify uniquely Python objects
        containing numpy arrays.

        Parameters
        ----------
        hash_name: 'md5' or 'sha1'
            Hashing algorithm used. sha1 is supposedly safer, but md5 is
            faster.
        coerce_mmap: boolean
            Make no difference between np.memmap and np.ndarray
    """
    allowed_names = ('md5', 'sha1')
    if hash_name not in allowed_names:
        raise ValueError("Valid options for 'hash_name' are {}. "
                         "Got hash_name={!r} instead."
                         .format(allowed_names, hash_name))
    # Only use the numpy-aware hasher when numpy has already been imported,
    # so that hashing alone never pulls numpy into the process.
    numpy_loaded = 'numpy' in sys.modules
    hasher = (NumpyHasher(hash_name=hash_name, coerce_mmap=coerce_mmap)
              if numpy_loaded
              else Hasher(hash_name=hash_name))
    return hasher.hash(obj)
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/logger.py
ADDED
|
@@ -0,0 +1,162 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Helpers for logging.
|
| 3 |
+
|
| 4 |
+
This module needs much love to become useful.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
|
| 8 |
+
# Copyright (c) 2008 Gael Varoquaux
|
| 9 |
+
# License: BSD Style, 3 clauses.
|
| 10 |
+
|
| 11 |
+
from __future__ import print_function
|
| 12 |
+
|
| 13 |
+
import time
|
| 14 |
+
import sys
|
| 15 |
+
import os
|
| 16 |
+
import shutil
|
| 17 |
+
import logging
|
| 18 |
+
import pprint
|
| 19 |
+
|
| 20 |
+
from .disk import mkdirp
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def _squeeze_time(t):
|
| 24 |
+
"""Remove .1s to the time under Windows: this is the time it take to
|
| 25 |
+
stat files. This is needed to make results similar to timings under
|
| 26 |
+
Unix, for tests
|
| 27 |
+
"""
|
| 28 |
+
if sys.platform.startswith('win'):
|
| 29 |
+
return max(0, t - .1)
|
| 30 |
+
else:
|
| 31 |
+
return t
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def format_time(t):
    """Render an elapsed time ``t`` (seconds) as seconds and minutes."""
    squeezed = _squeeze_time(t)
    return "%.1fs, %.1fmin" % (squeezed, squeezed / 60.)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def short_format_time(t):
    """Render an elapsed time compactly: minutes once over one minute."""
    t = _squeeze_time(t)
    if t <= 60:
        return " %5.1fs" % (t)
    return "%4.1fmin" % (t / 60.)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def pformat(obj, indent=0, depth=3):
    """Pretty-print ``obj`` to a string.

    If numpy has already been imported, its print options are temporarily
    tightened (low precision, small threshold) so that large arrays render
    compactly, and then restored.

    Parameters
    ----------
    obj: any
        Object to format.
    indent: int, optional
        Indentation passed to ``pprint.pformat``.
    depth: int, optional
        Maximum nesting depth passed to ``pprint.pformat``.

    Returns
    -------
    str
        The formatted representation of ``obj``.
    """
    print_options = None
    if 'numpy' in sys.modules:
        import numpy as np
        print_options = np.get_printoptions()
        np.set_printoptions(precision=6, threshold=64, edgeitems=1)
    try:
        return pprint.pformat(obj, depth=depth, indent=indent)
    finally:
        # Restore the caller's numpy print options even if pformat raises;
        # the original code leaked the modified options on error.
        if print_options:
            np.set_printoptions(**print_options)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
###############################################################################
|
| 61 |
+
# class `Logger`
|
| 62 |
+
###############################################################################
|
| 63 |
+
class Logger(object):
    """Base class providing simple logging helpers for joblib objects."""

    def __init__(self, depth=3, name=None):
        """
        Parameters
        ----------
        depth: int, optional
            The depth of objects printed.
        name: str, optional
            The namespace to log to. If None, defaults to joblib.
        """
        self.depth = depth
        self._name = name if name else 'joblib'

    def warn(self, msg):
        # Warnings go through the named logger.
        logging.getLogger(self._name).warning("[%s]: %s" % (self, msg))

    def info(self, msg):
        # NOTE(review): unlike warn()/debug(), this targets the root logger
        # (module-level logging.info) -- kept as-is to preserve behavior.
        logging.info("[%s]: %s" % (self, msg))

    def debug(self, msg):
        # XXX: This conflicts with the debug flag used in children class
        logging.getLogger(self._name).debug("[%s]: %s" % (self, msg))

    def format(self, obj, indent=0):
        """Return the formatted representation of the object."""
        return pformat(obj, indent=indent, depth=self.depth)
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
###############################################################################
|
| 95 |
+
# class `PrintTime`
|
| 96 |
+
###############################################################################
|
| 97 |
+
class PrintTime(object):
    """ Print and log messages while keeping track of time.
    """

    def __init__(self, logfile=None, logdir=None):
        """
        Parameters
        ----------
        logfile: str, optional
            Path of the file that messages are appended to. Mutually
            exclusive with ``logdir``.
        logdir: str, optional
            Directory in which a 'joblib.log' file is created and used
            as the log file.
        """
        if logfile is not None and logdir is not None:
            raise ValueError('Cannot specify both logfile and logdir')
        self.last_time = time.time()
        self.start_time = self.last_time
        if logdir is not None:
            logfile = os.path.join(logdir, 'joblib.log')
        self.logfile = logfile
        if logfile is not None:
            mkdirp(os.path.dirname(logfile))
            if os.path.exists(logfile):
                # Rotate the logs
                for i in range(1, 9):
                    try:
                        shutil.move(logfile + '.%i' % i,
                                    logfile + '.%i' % (i + 1))
                    except Exception:
                        # Rotation is best-effort: a missing generation is
                        # fine. Narrowed from a bare except so that
                        # KeyboardInterrupt/SystemExit are not swallowed.
                        pass
                # Use a copy rather than a move, so that a process
                # monitoring this file does not get lost.
                try:
                    shutil.copy(logfile, logfile + '.1')
                except Exception:
                    pass
            try:
                with open(logfile, 'w') as logfile:
                    logfile.write('\nLogging joblib python script\n')
                    logfile.write('\n---%s---\n' % time.ctime(self.last_time))
            except Exception:
                # Multiprocessing writing to files can create race
                # conditions. Rather fail silently than crash the
                # computation.
                # XXX: We actually need a debug flag to disable this
                # silent failure.
                pass

    def __call__(self, msg='', total=False):
        """ Print the time elapsed between the last call and the current
            call, with an optional message.
        """
        if not total:
            time_lapse = time.time() - self.last_time
            full_msg = "%s: %s" % (msg, format_time(time_lapse))
        else:
            # FIXME: Too much logic duplicated
            time_lapse = time.time() - self.start_time
            full_msg = "%s: %.2fs, %.1f min" % (msg, time_lapse,
                                                time_lapse / 60)
        print(full_msg, file=sys.stderr)
        if self.logfile is not None:
            try:
                with open(self.logfile, 'a') as f:
                    print(full_msg, file=f)
            except Exception:
                # Multiprocessing writing to files can create race
                # conditions. Rather fail silently than crash the
                # calculation.
                # XXX: We actually need a debug flag to disable this
                # silent failure.
                pass
        self.last_time = time.time()
|
material/dataset/xpsdeeplearning/.venv/lib/python3.10/site-packages/joblib/memory.py
ADDED
|
@@ -0,0 +1,1172 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
A context object for caching a function's return value each time it
|
| 3 |
+
is called with the same input arguments.
|
| 4 |
+
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
|
| 8 |
+
# Copyright (c) 2009 Gael Varoquaux
|
| 9 |
+
# License: BSD Style, 3 clauses.
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
import asyncio
|
| 13 |
+
import datetime
|
| 14 |
+
import functools
|
| 15 |
+
import inspect
|
| 16 |
+
import logging
|
| 17 |
+
import os
|
| 18 |
+
import pathlib
|
| 19 |
+
import pydoc
|
| 20 |
+
import re
|
| 21 |
+
import textwrap
|
| 22 |
+
import time
|
| 23 |
+
import tokenize
|
| 24 |
+
import traceback
|
| 25 |
+
import warnings
|
| 26 |
+
import weakref
|
| 27 |
+
|
| 28 |
+
from . import hashing
|
| 29 |
+
from ._store_backends import CacheWarning # noqa
|
| 30 |
+
from ._store_backends import FileSystemStoreBackend, StoreBackendBase
|
| 31 |
+
from .func_inspect import (filter_args, format_call, format_signature,
|
| 32 |
+
get_func_code, get_func_name)
|
| 33 |
+
from .logger import Logger, format_time, pformat
|
| 34 |
+
|
| 35 |
+
FIRST_LINE_TEXT = "# first line:"
|
| 36 |
+
|
| 37 |
+
# TODO: The following object should have a data store object as a sub
|
| 38 |
+
# object, and the interface to persist and query should be separated in
|
| 39 |
+
# the data store.
|
| 40 |
+
#
|
| 41 |
+
# This would enable creating 'Memory' objects with a different logic for
|
| 42 |
+
# pickling that would simply span a MemorizedFunc with the same
|
| 43 |
+
# store (or do we want to copy it to avoid cross-talks?), for instance to
|
| 44 |
+
# implement HDF5 pickling.
|
| 45 |
+
|
| 46 |
+
# TODO: Same remark for the logger, and probably use the Python logging
|
| 47 |
+
# mechanism.
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def extract_first_line(func_code):
    """ Extract the first line information from the function code
        text if available.
    """
    if not func_code.startswith(FIRST_LINE_TEXT):
        return func_code, -1
    # The marker line carries the original source line number; strip the
    # marker off and parse the number.
    marker, _, remainder = func_code.partition('\n')
    first_line = int(marker[len(FIRST_LINE_TEXT):])
    return remainder, first_line
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class JobLibCollisionWarning(UserWarning):
    """Warning emitted when two function names may collide in the cache."""
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
_STORE_BACKENDS = {'local': FileSystemStoreBackend}
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def register_store_backend(backend_name, backend):
    """Extend available store backends.

    The Memory, MemorizeResult and MemorizeFunc objects are designed to be
    agnostic to the type of store used behind. By default, the local file
    system is used but this function gives the possibility to extend joblib's
    memory pattern with other types of storage such as cloud storage (S3, GCS,
    OpenStack, HadoopFS, etc) or blob DBs.

    Parameters
    ----------
    backend_name: str
        The name identifying the store backend being registered. For example,
        'local' is used with FileSystemStoreBackend.
    backend: StoreBackendBase subclass
        The name of a class that implements the StoreBackendBase interface.

    Raises
    ------
    ValueError
        If ``backend_name`` is not a string or ``backend`` is not a
        StoreBackendBase subclass.
    """
    if not isinstance(backend_name, str):
        raise ValueError("Store backend name should be a string, "
                         "'{0}' given.".format(backend_name))
    # Guard with isinstance(backend, type) so that passing an instance (or
    # any non-class object) raises the intended ValueError below instead of
    # a TypeError from issubclass().
    if backend is None or not isinstance(backend, type) \
            or not issubclass(backend, StoreBackendBase):
        raise ValueError("Store backend should inherit "
                         "StoreBackendBase, "
                         "'{0}' given.".format(backend))

    _STORE_BACKENDS[backend_name] = backend
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def _store_backend_factory(backend, location, verbose=0, backend_options=None):
    """Return the correct store object for the given location.

    Parameters
    ----------
    backend: str
        Name of a registered store backend (e.g. 'local').
    location: str, pathlib.Path, StoreBackendBase or None
        An already-built backend instance is returned unchanged; a
        string/Path is handed to the backend's configure(); any other
        non-None value yields None with a warning.
    verbose: int, optional
        Verbosity forwarded to the store backend.
    backend_options: dict, optional
        Extra named parameters, some of them specific to the underlying
        store backend.

    Returns
    -------
    A configured StoreBackendBase instance, or None when no backend can
    be built from ``location``.
    """
    if backend_options is None:
        backend_options = {}

    if isinstance(location, pathlib.Path):
        location = str(location)

    if isinstance(location, StoreBackendBase):
        # Already a configured store: use it as-is.
        return location
    elif isinstance(location, str):
        location = os.path.expanduser(location)
        # Look up the requested backend among the registered ones. A direct
        # dict lookup replaces the original linear scan over items(); keys
        # are unique, so behavior is identical.
        backend_class = _STORE_BACKENDS.get(backend)
        if backend_class is None:
            raise TypeError('Unknown location {0} or backend {1}'.format(
                            location, backend))
        obj = backend_class()

        # The store backend is configured with the extra named parameters,
        # some of them are specific to the underlying store backend.
        obj.configure(location, verbose=verbose,
                      backend_options=backend_options)
        return obj
    elif location is not None:
        warnings.warn(
            "Instantiating a backend using a {} as a location is not "
            "supported by joblib. Returning None instead.".format(
                location.__class__.__name__), UserWarning)

    return None
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def _build_func_identifier(func):
    """Build a roughly unique identifier for the cached function."""
    modules, funcname = get_func_name(func)
    # We reuse historical fs-like way of building a function identifier
    parts = list(modules)
    parts.append(funcname)
    return os.path.join(*parts)
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
# An in-memory store to avoid looking at the disk-based function
|
| 148 |
+
# source code to check if a function definition has changed
|
| 149 |
+
_FUNCTION_HASHES = weakref.WeakKeyDictionary()
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
###############################################################################
|
| 153 |
+
# class `MemorizedResult`
|
| 154 |
+
###############################################################################
|
| 155 |
+
class MemorizedResult(Logger):
    """Object representing a cached value.

    Attributes
    ----------
    location: str
        The location of joblib cache. Depends on the store backend used.

    func: function or str
        function whose output is cached. The string case is intended only for
        instantiation based on the output of repr() on another instance.
        (namely eval(repr(memorized_instance)) works).

    argument_hash: str
        hash of the function arguments.

    backend: str
        Type of store backend for reading/writing cache files.
        Default is 'local'.

    mmap_mode: {None, 'r+', 'r', 'w+', 'c'}
        The memmapping mode used when loading from cache numpy arrays. See
        numpy.load for the meaning of the different values.

    verbose: int
        verbosity level (0 means no message).

    timestamp, metadata: string
        for internal use only.
    """
    def __init__(self, location, call_id, backend='local', mmap_mode=None,
                 verbose=0, timestamp=None, metadata=None):
        Logger.__init__(self)
        # call_id is a (func_id, args_id) pair identifying one cached call
        # (see the func_id/args_id properties below).
        self._call_id = call_id
        self.store_backend = _store_backend_factory(backend, location,
                                                    verbose=verbose)
        self.mmap_mode = mmap_mode

        # Use the caller-supplied metadata when given; otherwise fetch it
        # from the store backend.
        if metadata is not None:
            self.metadata = metadata
        else:
            self.metadata = self.store_backend.get_metadata(self._call_id)

        # Duration of the original computation, when present in metadata.
        self.duration = self.metadata.get('duration', None)
        self.verbose = verbose
        self.timestamp = timestamp

    @property
    def func(self):
        # Backward-compatible alias: exposes the function identifier.
        return self.func_id

    @property
    def func_id(self):
        # First element of the call id: identifier of the cached function.
        return self._call_id[0]

    @property
    def args_id(self):
        # Second element of the call id: hash of the call arguments.
        return self._call_id[1]

    @property
    def argument_hash(self):
        # Deprecated alias of args_id; kept for callers of older joblib.
        warnings.warn(
            "The 'argument_hash' attribute has been deprecated in version "
            "0.12 and will be removed in version 0.14.\n"
            "Use `args_id` attribute instead.",
            DeprecationWarning, stacklevel=2)
        return self.args_id

    def get(self):
        """Read value from cache and return it."""
        try:
            return self.store_backend.load_item(
                self._call_id,
                timestamp=self.timestamp,
                metadata=self.metadata,
                verbose=self.verbose
            )
        except ValueError as exc:
            # A ValueError from the backend means the stored item could not
            # be read back; surface it as a KeyError naming the cache folder.
            new_exc = KeyError(
                "Error while trying to load a MemorizedResult's value. "
                "It seems that this folder is corrupted : {}".format(
                    os.path.join(self.store_backend.location, *self._call_id)))
            raise new_exc from exc

    def clear(self):
        """Clear value from cache"""
        self.store_backend.clear_item(self._call_id)

    def __repr__(self):
        # Matches the eval(repr(...)) round-trip contract described in the
        # class docstring.
        return '{}(location="{}", func="{}", args_id="{}")'.format(
            self.__class__.__name__, self.store_backend.location,
            *self._call_id
        )

    def __getstate__(self):
        # Drop the timestamp when pickling: it is process-local state.
        state = self.__dict__.copy()
        state['timestamp'] = None
        return state
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
class NotMemorizedResult(object):
    """In-memory stand-in for MemorizedResult when there is no cache.

    Holds an arbitrary value and mimics the MemorizedResult API.
    """
    __slots__ = ('value', 'valid')

    def __init__(self, value):
        self.value = value
        self.valid = True

    def get(self):
        # Mirror MemorizedResult.get(): raise once the value is cleared.
        if not self.valid:
            raise KeyError("No value stored.")
        return self.value

    def clear(self):
        self.valid = False
        self.value = None

    def __repr__(self):
        name = self.__class__.__name__
        if not self.valid:
            return name + ' with no value'
        return '{class_name}({value})'.format(class_name=name,
                                              value=pformat(self.value))

    # __getstate__ and __setstate__ are required because of __slots__
    def __getstate__(self):
        return {"valid": self.valid, "value": self.value}

    def __setstate__(self, state):
        self.valid = state["valid"]
        self.value = state["value"]
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
###############################################################################
|
| 294 |
+
# class `NotMemorizedFunc`
|
| 295 |
+
###############################################################################
|
| 296 |
+
class NotMemorizedFunc(object):
    """No-op object decorating a function.

    This class replaces MemorizedFunc when there is no cache. It provides an
    identical API but does not write anything on disk.

    Attributes
    ----------
    func: callable
        Original undecorated function.
    """
    # Should be a light as possible (for speed)
    def __init__(self, func):
        self.func = func

    def __call__(self, *args, **kwargs):
        return self.func(*args, **kwargs)

    def call(self, *args, **kwargs):
        # Same (output, metadata) shape as MemorizedFunc.call, with empty
        # metadata since nothing is stored.
        return self.func(*args, **kwargs), {}

    def call_and_shelve(self, *args, **kwargs):
        return NotMemorizedResult(self.func(*args, **kwargs))

    def check_call_in_cache(self, *args, **kwargs):
        # Nothing is ever cached here.
        return False

    def clear(self, warn=True):
        # Argument "warn" is for compatibility with MemorizedFunc.clear
        pass

    def __repr__(self):
        return '{0}(func={1})'.format(self.__class__.__name__, self.func)
|
| 329 |
+
|
| 330 |
+
|
| 331 |
+
###############################################################################
|
| 332 |
+
# class `AsyncNotMemorizedFunc`
|
| 333 |
+
###############################################################################
|
| 334 |
+
class AsyncNotMemorizedFunc(NotMemorizedFunc):
    """Coroutine-friendly variant of NotMemorizedFunc."""

    async def call_and_shelve(self, *args, **kwargs):
        # Await the wrapped coroutine, then wrap its result so callers get
        # the same shelved-result API as the synchronous version.
        return NotMemorizedResult(await self.func(*args, **kwargs))
|
| 337 |
+
|
| 338 |
+
|
| 339 |
+
###############################################################################
|
| 340 |
+
# class `MemorizedFunc`
|
| 341 |
+
###############################################################################
|
| 342 |
+
class MemorizedFunc(Logger):
|
| 343 |
+
"""Callable object decorating a function for caching its return value
|
| 344 |
+
each time it is called.
|
| 345 |
+
|
| 346 |
+
Methods are provided to inspect the cache or clean it.
|
| 347 |
+
|
| 348 |
+
Attributes
|
| 349 |
+
----------
|
| 350 |
+
func: callable
|
| 351 |
+
The original, undecorated, function.
|
| 352 |
+
|
| 353 |
+
location: string
|
| 354 |
+
The location of joblib cache. Depends on the store backend used.
|
| 355 |
+
|
| 356 |
+
backend: str
|
| 357 |
+
Type of store backend for reading/writing cache files.
|
| 358 |
+
Default is 'local', in which case the location is the path to a
|
| 359 |
+
disk storage.
|
| 360 |
+
|
| 361 |
+
ignore: list or None
|
| 362 |
+
List of variable names to ignore when choosing whether to
|
| 363 |
+
recompute.
|
| 364 |
+
|
| 365 |
+
mmap_mode: {None, 'r+', 'r', 'w+', 'c'}
|
| 366 |
+
The memmapping mode used when loading from cache
|
| 367 |
+
numpy arrays. See numpy.load for the meaning of the different
|
| 368 |
+
values.
|
| 369 |
+
|
| 370 |
+
compress: boolean, or integer
|
| 371 |
+
Whether to zip the stored data on disk. If an integer is
|
| 372 |
+
given, it should be between 1 and 9, and sets the amount
|
| 373 |
+
of compression. Note that compressed arrays cannot be
|
| 374 |
+
read by memmapping.
|
| 375 |
+
|
| 376 |
+
verbose: int, optional
|
| 377 |
+
The verbosity flag, controls messages that are issued as
|
| 378 |
+
the function is evaluated.
|
| 379 |
+
|
| 380 |
+
cache_validation_callback: callable, optional
|
| 381 |
+
Callable to check if a result in cache is valid or is to be recomputed.
|
| 382 |
+
When the function is called with arguments for which a cache exists,
|
| 383 |
+
the callback is called with the cache entry's metadata as its sole
|
| 384 |
+
argument. If it returns True, the cached result is returned, else the
|
| 385 |
+
cache for these arguments is cleared and the result is recomputed.
|
| 386 |
+
"""
|
| 387 |
+
# ------------------------------------------------------------------------
|
| 388 |
+
# Public interface
|
| 389 |
+
# ------------------------------------------------------------------------
|
| 390 |
+
|
| 391 |
+
def __init__(self, func, location, backend='local', ignore=None,
|
| 392 |
+
mmap_mode=None, compress=False, verbose=1, timestamp=None,
|
| 393 |
+
cache_validation_callback=None):
|
| 394 |
+
Logger.__init__(self)
|
| 395 |
+
self.mmap_mode = mmap_mode
|
| 396 |
+
self.compress = compress
|
| 397 |
+
self.func = func
|
| 398 |
+
self.cache_validation_callback = cache_validation_callback
|
| 399 |
+
self.func_id = _build_func_identifier(func)
|
| 400 |
+
self.ignore = ignore if ignore is not None else []
|
| 401 |
+
self._verbose = verbose
|
| 402 |
+
|
| 403 |
+
# retrieve store object from backend type and location.
|
| 404 |
+
self.store_backend = _store_backend_factory(backend, location,
|
| 405 |
+
verbose=verbose,
|
| 406 |
+
backend_options=dict(
|
| 407 |
+
compress=compress,
|
| 408 |
+
mmap_mode=mmap_mode),
|
| 409 |
+
)
|
| 410 |
+
if self.store_backend is not None:
|
| 411 |
+
# Create func directory on demand.
|
| 412 |
+
self.store_backend.store_cached_func_code([self.func_id])
|
| 413 |
+
|
| 414 |
+
self.timestamp = timestamp if timestamp is not None else time.time()
|
| 415 |
+
try:
|
| 416 |
+
functools.update_wrapper(self, func)
|
| 417 |
+
except Exception:
|
| 418 |
+
pass # Objects like ufunc don't like that
|
| 419 |
+
if inspect.isfunction(func):
|
| 420 |
+
doc = pydoc.TextDoc().document(func)
|
| 421 |
+
# Remove blank line
|
| 422 |
+
doc = doc.replace('\n', '\n\n', 1)
|
| 423 |
+
# Strip backspace-overprints for compatibility with autodoc
|
| 424 |
+
doc = re.sub('\x08.', '', doc)
|
| 425 |
+
else:
|
| 426 |
+
# Pydoc does a poor job on other objects
|
| 427 |
+
doc = func.__doc__
|
| 428 |
+
self.__doc__ = 'Memoized version of %s' % doc
|
| 429 |
+
|
| 430 |
+
self._func_code_info = None
|
| 431 |
+
self._func_code_id = None
|
| 432 |
+
|
| 433 |
+
def _is_in_cache_and_valid(self, call_id):
|
| 434 |
+
"""Check if the function call is cached and valid for given arguments.
|
| 435 |
+
|
| 436 |
+
- Compare the function code with the one from the cached function,
|
| 437 |
+
asserting if it has changed.
|
| 438 |
+
- Check if the function call is present in the cache.
|
| 439 |
+
- Call `cache_validation_callback` for user define cache validation.
|
| 440 |
+
|
| 441 |
+
Returns True if the function call is in cache and can be used, and
|
| 442 |
+
returns False otherwise.
|
| 443 |
+
"""
|
| 444 |
+
# Check if the code of the function has changed
|
| 445 |
+
if not self._check_previous_func_code(stacklevel=4):
|
| 446 |
+
return False
|
| 447 |
+
|
| 448 |
+
# Check if this specific call is in the cache
|
| 449 |
+
if not self.store_backend.contains_item(call_id):
|
| 450 |
+
return False
|
| 451 |
+
|
| 452 |
+
# Call the user defined cache validation callback
|
| 453 |
+
metadata = self.store_backend.get_metadata(call_id)
|
| 454 |
+
if (self.cache_validation_callback is not None and
|
| 455 |
+
not self.cache_validation_callback(metadata)):
|
| 456 |
+
self.store_backend.clear_item(call_id)
|
| 457 |
+
return False
|
| 458 |
+
|
| 459 |
+
return True
|
| 460 |
+
|
| 461 |
+
    def _cached_call(self, args, kwargs, shelving):
        """Call wrapped function and cache result, or read cache if available.

        This function returns the wrapped function output or a reference to
        the cached result.

        Arguments:
        ----------

        args, kwargs: list and dict
            input arguments for wrapped function

        shelving: bool
            True when called via the call_and_shelve function.


        Returns
        -------
        output: Output of the wrapped function if shelving is false, or a
            MemorizedResult reference to the value if shelving is true.
        metadata: dict containing the metadata associated with the call.
        """
        args_id = self._get_args_id(*args, **kwargs)
        call_id = (self.func_id, args_id)
        _, func_name = get_func_name(self.func)
        func_info = self.store_backend.get_cached_func_info([self.func_id])
        location = func_info['location']

        if self._verbose >= 20:
            logging.basicConfig(level=logging.INFO)
            _, signature = format_signature(self.func, *args, **kwargs)
            self.info(
                textwrap.dedent(
                    f"""
                    Querying {func_name} with signature
                    {signature}.

                    (argument hash {args_id})

                    The store location is {location}.
                    """
                )
            )

        # Compare the function code with the previous to see if the
        # function code has changed and check if the results are present in
        # the cache.
        if self._is_in_cache_and_valid(call_id):
            if shelving:
                # Hand back a lazy reference without loading the value.
                return self._get_memorized_result(call_id), {}

            try:
                start_time = time.time()
                output = self._load_item(call_id)
                if self._verbose > 4:
                    self._print_duration(time.time() - start_time,
                                         context='cache loaded ')
                return output, {}
            except Exception:
                # XXX: Should use an exception logger
                # A corrupted/unreadable entry falls through to recompute.
                _, signature = format_signature(self.func, *args, **kwargs)
                self.warn('Exception while loading results for '
                          '{}\n {}'.format(signature, traceback.format_exc()))

        if self._verbose > 10:
            self.warn(
                f"Computing func {func_name}, argument hash {args_id} "
                f"in location {location}"
            )

        # Returns the output but not the metadata
        return self._call(call_id, args, kwargs, shelving)
|
| 533 |
+
|
| 534 |
+
    @property
    def func_code_info(self):
        """3-tuple: (function source code, source file, first line number)."""
        # 3-tuple property containing: the function source code, source file,
        # and first line of the code inside the source file
        if hasattr(self.func, '__code__'):
            if self._func_code_id is None:
                # First access: remember which code object we introspected.
                self._func_code_id = id(self.func.__code__)
            elif id(self.func.__code__) != self._func_code_id:
                # Be robust to dynamic reassignments of self.func.__code__
                self._func_code_info = None

        if self._func_code_info is None:
            # Cache the source code of self.func . Provided that get_func_code
            # (which should be called once on self) gets called in the process
            # in which self.func was defined, this caching mechanism prevents
            # undesired cache clearing when the cached function is called in
            # an environment where the introspection utilities get_func_code
            # relies on do not work (typically, in joblib child processes).
            # See #1035 for more info
            # TODO (pierreglaser): do the same with get_func_name?
            self._func_code_info = get_func_code(self.func)
        return self._func_code_info
|
| 556 |
+
|
| 557 |
+
def call_and_shelve(self, *args, **kwargs):
|
| 558 |
+
"""Call wrapped function, cache result and return a reference.
|
| 559 |
+
|
| 560 |
+
This method returns a reference to the cached result instead of the
|
| 561 |
+
result itself. The reference object is small and pickeable, allowing
|
| 562 |
+
to send or store it easily. Call .get() on reference object to get
|
| 563 |
+
result.
|
| 564 |
+
|
| 565 |
+
Returns
|
| 566 |
+
-------
|
| 567 |
+
cached_result: MemorizedResult or NotMemorizedResult
|
| 568 |
+
reference to the value returned by the wrapped function. The
|
| 569 |
+
class "NotMemorizedResult" is used when there is no cache
|
| 570 |
+
activated (e.g. location=None in Memory).
|
| 571 |
+
"""
|
| 572 |
+
# Return the wrapped output, without the metadata
|
| 573 |
+
return self._cached_call(args, kwargs, shelving=True)[0]
|
| 574 |
+
|
| 575 |
+
def __call__(self, *args, **kwargs):
|
| 576 |
+
# Return the output, without the metadata
|
| 577 |
+
return self._cached_call(args, kwargs, shelving=False)[0]
|
| 578 |
+
|
| 579 |
+
def __getstate__(self):
|
| 580 |
+
# Make sure self.func's source is introspected prior to being pickled -
|
| 581 |
+
# code introspection utilities typically do not work inside child
|
| 582 |
+
# processes
|
| 583 |
+
_ = self.func_code_info
|
| 584 |
+
|
| 585 |
+
# We don't store the timestamp when pickling, to avoid the hash
|
| 586 |
+
# depending from it.
|
| 587 |
+
state = self.__dict__.copy()
|
| 588 |
+
state['timestamp'] = None
|
| 589 |
+
|
| 590 |
+
# Invalidate the code id as id(obj) will be different in the child
|
| 591 |
+
state['_func_code_id'] = None
|
| 592 |
+
|
| 593 |
+
return state
|
| 594 |
+
|
| 595 |
+
def check_call_in_cache(self, *args, **kwargs):
|
| 596 |
+
"""Check if function call is in the memory cache.
|
| 597 |
+
|
| 598 |
+
Does not call the function or do any work besides func inspection
|
| 599 |
+
and arg hashing.
|
| 600 |
+
|
| 601 |
+
Returns
|
| 602 |
+
-------
|
| 603 |
+
is_call_in_cache: bool
|
| 604 |
+
Whether or not the result of the function has been cached
|
| 605 |
+
for the input arguments that have been passed.
|
| 606 |
+
"""
|
| 607 |
+
call_id = (self.func_id, self._get_args_id(*args, **kwargs))
|
| 608 |
+
return self.store_backend.contains_item(call_id)
|
| 609 |
+
|
| 610 |
+
# ------------------------------------------------------------------------
|
| 611 |
+
# Private interface
|
| 612 |
+
# ------------------------------------------------------------------------
|
| 613 |
+
|
| 614 |
+
def _get_args_id(self, *args, **kwargs):
|
| 615 |
+
"""Return the input parameter hash of a result."""
|
| 616 |
+
return hashing.hash(filter_args(self.func, self.ignore, args, kwargs),
|
| 617 |
+
coerce_mmap=self.mmap_mode is not None)
|
| 618 |
+
|
| 619 |
+
def _hash_func(self):
|
| 620 |
+
"""Hash a function to key the online cache"""
|
| 621 |
+
func_code_h = hash(getattr(self.func, '__code__', None))
|
| 622 |
+
return id(self.func), hash(self.func), func_code_h
|
| 623 |
+
|
| 624 |
+
    def _write_func_code(self, func_code, first_line):
        """ Write the function code and the filename to a file.

        func_code: str
            source code of the wrapped function.
        first_line: int
            line number of the function's first line in its source file.
        """
        # We store the first line because the filename and the function
        # name is not always enough to identify a function: people
        # sometimes have several functions named the same way in a
        # file. This is bad practice, but joblib should be robust to bad
        # practice.
        func_code = u'%s %i\n%s' % (FIRST_LINE_TEXT, first_line, func_code)
        self.store_backend.store_cached_func_code([self.func_id], func_code)

        # Also store in the in-memory store of function hashes
        is_named_callable = (hasattr(self.func, '__name__') and
                             self.func.__name__ != '<lambda>')
        if is_named_callable:
            # Don't do this for lambda functions or strange callable
            # objects, as it ends up being too fragile
            func_hash = self._hash_func()
            try:
                _FUNCTION_HASHES[self.func] = func_hash
            except TypeError:
                # Some callable are not hashable
                pass
|
| 647 |
+
|
| 648 |
+
    def _check_previous_func_code(self, stacklevel=2):
        """Check that the stored source code matches the current function.

            stacklevel is the depth a which this function is called, to
            issue useful warnings to the user.

        Returns True when the cached source matches the current one;
        otherwise the stale cache is wiped (or the new source written)
        and False is returned.
        """
        # First check if our function is in the in-memory store.
        # Using the in-memory store not only makes things faster, but it
        # also renders us robust to variations of the files when the
        # in-memory version of the code does not vary
        try:
            if self.func in _FUNCTION_HASHES:
                # We use as an identifier the id of the function and its
                # hash. This is more likely to falsely change than have hash
                # collisions, thus we are on the safe side.
                func_hash = self._hash_func()
                if func_hash == _FUNCTION_HASHES[self.func]:
                    return True
        except TypeError:
            # Some callables are not hashable
            pass

        # Here, we go through some effort to be robust to dynamically
        # changing code and collision. We cannot inspect.getsource
        # because it is not reliable when using IPython's magic "%run".
        func_code, source_file, first_line = self.func_code_info
        try:
            old_func_code, old_first_line = extract_first_line(
                self.store_backend.get_cached_func_code([self.func_id]))
        except (IOError, OSError):  # some backend can also raise OSError
            # Nothing stored yet (or unreadable): record the current source.
            self._write_func_code(func_code, first_line)
            return False
        if old_func_code == func_code:
            return True

        # We have differing code, is this because we are referring to
        # different functions, or because the function we are referring to has
        # changed?

        _, func_name = get_func_name(self.func, resolv_alias=False,
                                     win_characters=False)
        if old_first_line == first_line == -1 or func_name == '<lambda>':
            # No line information available: collisions cannot be detected.
            if not first_line == -1:
                func_description = ("{0} ({1}:{2})"
                                    .format(func_name, source_file,
                                            first_line))
            else:
                func_description = func_name
            warnings.warn(JobLibCollisionWarning(
                "Cannot detect name collisions for function '{0}'"
                .format(func_description)), stacklevel=stacklevel)

        # Fetch the code at the old location and compare it. If it is the
        # same than the code store, we have a collision: the code in the
        # file has not changed, but the name we have is pointing to a new
        # code block.
        if not old_first_line == first_line and source_file is not None:
            if os.path.exists(source_file):
                _, func_name = get_func_name(self.func, resolv_alias=False)
                num_lines = len(func_code.split('\n'))
                with tokenize.open(source_file) as f:
                    on_disk_func_code = f.readlines()[
                        old_first_line - 1:old_first_line - 1 + num_lines - 1]
                on_disk_func_code = ''.join(on_disk_func_code)
                possible_collision = (on_disk_func_code.rstrip() ==
                                      old_func_code.rstrip())
            else:
                possible_collision = source_file.startswith('<doctest ')
            if possible_collision:
                warnings.warn(JobLibCollisionWarning(
                    'Possible name collisions between functions '
                    "'%s' (%s:%i) and '%s' (%s:%i)" %
                    (func_name, source_file, old_first_line,
                     func_name, source_file, first_line)),
                    stacklevel=stacklevel)

        # The function has changed, wipe the cache directory.
        # XXX: Should be using warnings, and giving stacklevel
        if self._verbose > 10:
            _, func_name = get_func_name(self.func, resolv_alias=False)
            self.warn("Function {0} (identified by {1}) has changed"
                      ".".format(func_name, self.func_id))
        self.clear(warn=True)
        return False
|
| 731 |
+
|
| 732 |
+
def clear(self, warn=True):
|
| 733 |
+
"""Empty the function's cache."""
|
| 734 |
+
func_id = self.func_id
|
| 735 |
+
if self._verbose > 0 and warn:
|
| 736 |
+
self.warn("Clearing function cache identified by %s" % func_id)
|
| 737 |
+
self.store_backend.clear_path([func_id, ])
|
| 738 |
+
|
| 739 |
+
func_code, _, first_line = self.func_code_info
|
| 740 |
+
self._write_func_code(func_code, first_line)
|
| 741 |
+
|
| 742 |
+
def call(self, *args, **kwargs):
|
| 743 |
+
"""Force the execution of the function with the given arguments.
|
| 744 |
+
|
| 745 |
+
The output values will be persisted, i.e., the cache will be updated
|
| 746 |
+
with any new values.
|
| 747 |
+
|
| 748 |
+
Parameters
|
| 749 |
+
----------
|
| 750 |
+
*args: arguments
|
| 751 |
+
The arguments.
|
| 752 |
+
**kwargs: keyword arguments
|
| 753 |
+
Keyword arguments.
|
| 754 |
+
|
| 755 |
+
Returns
|
| 756 |
+
-------
|
| 757 |
+
output : object
|
| 758 |
+
The output of the function call.
|
| 759 |
+
metadata : dict
|
| 760 |
+
The metadata associated with the call.
|
| 761 |
+
"""
|
| 762 |
+
call_id = (self.func_id, self._get_args_id(*args, **kwargs))
|
| 763 |
+
|
| 764 |
+
# Return the output and the metadata
|
| 765 |
+
return self._call(call_id, args, kwargs)
|
| 766 |
+
|
| 767 |
+
def _call(self, call_id, args, kwargs, shelving=False):
|
| 768 |
+
# Return the output and the metadata
|
| 769 |
+
self._before_call(args, kwargs)
|
| 770 |
+
start_time = time.time()
|
| 771 |
+
output = self.func(*args, **kwargs)
|
| 772 |
+
return self._after_call(call_id, args, kwargs, shelving,
|
| 773 |
+
output, start_time)
|
| 774 |
+
|
| 775 |
+
def _before_call(self, args, kwargs):
|
| 776 |
+
if self._verbose > 0:
|
| 777 |
+
print(format_call(self.func, args, kwargs))
|
| 778 |
+
|
| 779 |
+
    def _after_call(self, call_id, args, kwargs, shelving, output, start_time):
        """Persist ``output`` and its metadata after a cache-miss call.

        Returns ``(output_or_reference, metadata)`` matching the contract
        of ``_cached_call``.
        """
        # Dump the value first so a reference/reload below can find it.
        self.store_backend.dump_item(call_id, output, verbose=self._verbose)
        duration = time.time() - start_time
        if self._verbose > 0:
            self._print_duration(duration)
        metadata = self._persist_input(duration, call_id, args, kwargs)
        if shelving:
            return self._get_memorized_result(call_id, metadata), metadata

        if self.mmap_mode is not None:
            # Memmap the output at the first call to be consistent with
            # later calls
            output = self._load_item(call_id, metadata)
        return output, metadata
|
| 793 |
+
|
| 794 |
+
def _persist_input(self, duration, call_id, args, kwargs,
|
| 795 |
+
this_duration_limit=0.5):
|
| 796 |
+
""" Save a small summary of the call using json format in the
|
| 797 |
+
output directory.
|
| 798 |
+
|
| 799 |
+
output_dir: string
|
| 800 |
+
directory where to write metadata.
|
| 801 |
+
|
| 802 |
+
duration: float
|
| 803 |
+
time taken by hashing input arguments, calling the wrapped
|
| 804 |
+
function and persisting its output.
|
| 805 |
+
|
| 806 |
+
args, kwargs: list and dict
|
| 807 |
+
input arguments for wrapped function
|
| 808 |
+
|
| 809 |
+
this_duration_limit: float
|
| 810 |
+
Max execution time for this function before issuing a warning.
|
| 811 |
+
"""
|
| 812 |
+
start_time = time.time()
|
| 813 |
+
argument_dict = filter_args(self.func, self.ignore,
|
| 814 |
+
args, kwargs)
|
| 815 |
+
|
| 816 |
+
input_repr = dict((k, repr(v)) for k, v in argument_dict.items())
|
| 817 |
+
# This can fail due to race-conditions with multiple
|
| 818 |
+
# concurrent joblibs removing the file or the directory
|
| 819 |
+
metadata = {
|
| 820 |
+
"duration": duration, "input_args": input_repr, "time": start_time,
|
| 821 |
+
}
|
| 822 |
+
|
| 823 |
+
self.store_backend.store_metadata(call_id, metadata)
|
| 824 |
+
|
| 825 |
+
this_duration = time.time() - start_time
|
| 826 |
+
if this_duration > this_duration_limit:
|
| 827 |
+
# This persistence should be fast. It will not be if repr() takes
|
| 828 |
+
# time and its output is large, because json.dump will have to
|
| 829 |
+
# write a large file. This should not be an issue with numpy arrays
|
| 830 |
+
# for which repr() always output a short representation, but can
|
| 831 |
+
# be with complex dictionaries. Fixing the problem should be a
|
| 832 |
+
# matter of replacing repr() above by something smarter.
|
| 833 |
+
warnings.warn("Persisting input arguments took %.2fs to run."
|
| 834 |
+
"If this happens often in your code, it can cause "
|
| 835 |
+
"performance problems "
|
| 836 |
+
"(results will be correct in all cases). "
|
| 837 |
+
"The reason for this is probably some large input "
|
| 838 |
+
"arguments for a wrapped function."
|
| 839 |
+
% this_duration, stacklevel=5)
|
| 840 |
+
return metadata
|
| 841 |
+
|
| 842 |
+
def _get_memorized_result(self, call_id, metadata=None):
|
| 843 |
+
return MemorizedResult(self.store_backend, call_id,
|
| 844 |
+
metadata=metadata, timestamp=self.timestamp,
|
| 845 |
+
verbose=self._verbose - 1)
|
| 846 |
+
|
| 847 |
+
def _load_item(self, call_id, metadata=None):
|
| 848 |
+
return self.store_backend.load_item(call_id, metadata=metadata,
|
| 849 |
+
timestamp=self.timestamp,
|
| 850 |
+
verbose=self._verbose)
|
| 851 |
+
|
| 852 |
+
def _print_duration(self, duration, context=''):
|
| 853 |
+
_, name = get_func_name(self.func)
|
| 854 |
+
msg = f"{name} {context}- {format_time(duration)}"
|
| 855 |
+
print(max(0, (80 - len(msg))) * '_' + msg)
|
| 856 |
+
|
| 857 |
+
# ------------------------------------------------------------------------
|
| 858 |
+
# Private `object` interface
|
| 859 |
+
# ------------------------------------------------------------------------
|
| 860 |
+
|
| 861 |
+
def __repr__(self):
|
| 862 |
+
return '{class_name}(func={func}, location={location})'.format(
|
| 863 |
+
class_name=self.__class__.__name__,
|
| 864 |
+
func=self.func,
|
| 865 |
+
location=self.store_backend.location,)
|
| 866 |
+
|
| 867 |
+
|
| 868 |
+
###############################################################################
|
| 869 |
+
# class `AsyncMemorizedFunc`
|
| 870 |
+
###############################################################################
|
| 871 |
+
class AsyncMemorizedFunc(MemorizedFunc):
    """Memoized wrapper for coroutine functions.

    Same caching machinery as :class:`MemorizedFunc`, but the public entry
    points are coroutines and the wrapped function is awaited on a cache
    miss.
    """

    async def __call__(self, *args, **kwargs):
        # On a cache miss, _cached_call returns the coroutine produced by
        # the overridden _call below; await it only in that case.
        out = self._cached_call(args, kwargs, shelving=False)
        out = await out if asyncio.iscoroutine(out) else out
        return out[0]  # Don't return metadata

    async def call_and_shelve(self, *args, **kwargs):
        # Same pattern as __call__, but returns a shelved reference.
        out = self._cached_call(args, kwargs, shelving=True)
        out = await out if asyncio.iscoroutine(out) else out
        return out[0]  # Don't return metadata

    async def call(self, *args, **kwargs):
        # Force execution: the parent method ends up in our async _call.
        out = super().call(*args, **kwargs)
        return await out if asyncio.iscoroutine(out) else out

    async def _call(self, call_id, args, kwargs, shelving=False):
        # Mirrors MemorizedFunc._call but awaits the wrapped coroutine.
        self._before_call(args, kwargs)
        start_time = time.time()
        output = await self.func(*args, **kwargs)
        return self._after_call(
            call_id, args, kwargs, shelving, output, start_time
        )
|
| 893 |
+
|
| 894 |
+
|
| 895 |
+
###############################################################################
|
| 896 |
+
# class `Memory`
|
| 897 |
+
###############################################################################
|
| 898 |
+
class Memory(Logger):
    """ A context object for caching a function's return value each time it
    is called with the same input arguments.

    All values are cached on the filesystem, in a deep directory
    structure.

    Read more in the :ref:`User Guide <memory>`.

    Parameters
    ----------
    location: str, pathlib.Path or None
        The path of the base directory to use as a data store
        or None. If None is given, no caching is done and
        the Memory object is completely transparent. This option
        replaces cachedir since version 0.12.

    backend: str, optional
        Type of store backend for reading/writing cache files.
        Default: 'local'.
        The 'local' backend is using regular filesystem operations to
        manipulate data (open, mv, etc) in the backend.

    mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional
        The memmapping mode used when loading from cache
        numpy arrays. See numpy.load for the meaning of the
        arguments.

    compress: boolean, or integer, optional
        Whether to zip the stored data on disk. If an integer is
        given, it should be between 1 and 9, and sets the amount
        of compression. Note that compressed arrays cannot be
        read by memmapping.

    verbose: int, optional
        Verbosity flag, controls the debug messages that are issued
        as functions are evaluated.

    bytes_limit: int | str, optional
        Limit in bytes of the size of the cache. By default, the size of
        the cache is unlimited. When reducing the size of the cache,
        ``joblib`` keeps the most recently accessed items first. If a
        str is passed, it is converted to a number of bytes using units
        { K | M | G} for kilo, mega, giga.

        **Note:** You need to call :meth:`joblib.Memory.reduce_size` to
        actually reduce the cache size to be less than ``bytes_limit``.

        **Note:** This argument has been deprecated. One should give the
        value of ``bytes_limit`` directly in
        :meth:`joblib.Memory.reduce_size`.

    backend_options: dict, optional
        Contains a dictionary of named parameters used to configure
        the store backend.
    """
    # ------------------------------------------------------------------------
    # Public interface
    # ------------------------------------------------------------------------

    def __init__(self, location=None, backend='local',
                 mmap_mode=None, compress=False, verbose=1, bytes_limit=None,
                 backend_options=None):
        Logger.__init__(self)
        self._verbose = verbose
        self.mmap_mode = mmap_mode
        self.timestamp = time.time()
        if bytes_limit is not None:
            # Kept for backward compatibility; reduce_size is the new home.
            warnings.warn(
                "bytes_limit argument has been deprecated. It will be removed "
                "in version 1.5. Please pass its value directly to "
                "Memory.reduce_size.",
                category=DeprecationWarning
            )
        self.bytes_limit = bytes_limit
        self.backend = backend
        self.compress = compress
        if backend_options is None:
            backend_options = {}
        self.backend_options = backend_options

        if compress and mmap_mode is not None:
            warnings.warn('Compressed results cannot be memmapped',
                          stacklevel=2)

        self.location = location
        if isinstance(location, str):
            # Cache files live in a 'joblib' subdirectory of the location.
            location = os.path.join(location, 'joblib')

        # store_backend is None when caching is disabled (location=None).
        self.store_backend = _store_backend_factory(
            backend, location, verbose=self._verbose,
            backend_options=dict(compress=compress, mmap_mode=mmap_mode,
                                 **backend_options))

    def cache(self, func=None, ignore=None, verbose=None, mmap_mode=False,
              cache_validation_callback=None):
        """ Decorates the given function func to only compute its return
            value for input arguments not cached on disk.

            Parameters
            ----------
            func: callable, optional
                The function to be decorated
            ignore: list of strings
                A list of arguments name to ignore in the hashing
            verbose: integer, optional
                The verbosity mode of the function. By default that
                of the memory object is used.
            mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional
                The memmapping mode used when loading from cache
                numpy arrays. See numpy.load for the meaning of the
                arguments. By default that of the memory object is used.
            cache_validation_callback: callable, optional
                Callable to validate whether or not the cache is valid.
                When the cached function is called with arguments for
                which a cache exists, this callable is called with the
                metadata of the cached result as its sole argument. If it
                returns True, then the cached result is returned, else the
                cache for these arguments is cleared and recomputed.

            Returns
            -------
            decorated_func: MemorizedFunc object
                The returned object is a MemorizedFunc object, that is
                callable (behaves like a function), but offers extra
                methods for cache lookup and management. See the
                documentation for :class:`joblib.memory.MemorizedFunc`.
        """
        if (cache_validation_callback is not None and
                not callable(cache_validation_callback)):
            raise ValueError(
                "cache_validation_callback needs to be callable. "
                f"Got {cache_validation_callback}."
            )
        if func is None:
            # Partial application, to be able to specify extra keyword
            # arguments in decorators
            return functools.partial(
                self.cache, ignore=ignore,
                mmap_mode=mmap_mode,
                verbose=verbose,
                cache_validation_callback=cache_validation_callback
            )
        if self.store_backend is None:
            # Caching disabled: return a transparent pass-through wrapper.
            cls = (AsyncNotMemorizedFunc
                   if asyncio.iscoroutinefunction(func)
                   else NotMemorizedFunc)
            return cls(func)
        if verbose is None:
            verbose = self._verbose
        if mmap_mode is False:
            # False is the "inherit from this Memory" sentinel; None is a
            # legitimate explicit value and therefore not the default.
            mmap_mode = self.mmap_mode
        if isinstance(func, MemorizedFunc):
            # Unwrap an already-decorated function to avoid double wrapping.
            func = func.func
        cls = (AsyncMemorizedFunc
               if asyncio.iscoroutinefunction(func)
               else MemorizedFunc)
        return cls(
            func, location=self.store_backend, backend=self.backend,
            ignore=ignore, mmap_mode=mmap_mode, compress=self.compress,
            verbose=verbose, timestamp=self.timestamp,
            cache_validation_callback=cache_validation_callback
        )

    def clear(self, warn=True):
        """ Erase the complete cache directory.
        """
        if warn:
            self.warn('Flushing completely the cache')
        if self.store_backend is not None:
            self.store_backend.clear()

        # As the cache is completely clear, make sure the _FUNCTION_HASHES
        # cache is also reset. Else, for a function that is present in this
        # table, results cached after this clear will be have cache miss
        # as the function code is not re-written.
        _FUNCTION_HASHES.clear()

    def reduce_size(self, bytes_limit=None, items_limit=None, age_limit=None):
        """Remove cache elements to make the cache fit its limits.

        The limitation can impose that the cache size fits in ``bytes_limit``,
        that the number of cache items is no more than ``items_limit``, and
        that all files in cache are not older than ``age_limit``.

        Parameters
        ----------
        bytes_limit: int | str, optional
            Limit in bytes of the size of the cache. By default, the size of
            the cache is unlimited. When reducing the size of the cache,
            ``joblib`` keeps the most recently accessed items first. If a
            str is passed, it is converted to a number of bytes using units
            { K | M | G} for kilo, mega, giga.

        items_limit: int, optional
            Number of items to limit the cache to. By default, the number of
            items in the cache is unlimited. When reducing the size of the
            cache, ``joblib`` keeps the most recently accessed items first.

        age_limit: datetime.timedelta, optional
            Maximum age of items to limit the cache to. When reducing the size
            of the cache, any items last accessed more than the given length of
            time ago are deleted.
        """
        if bytes_limit is None:
            # Fall back to the (deprecated) constructor-level limit.
            bytes_limit = self.bytes_limit

        if self.store_backend is None:
            # No cached results, this function does nothing.
            return

        if bytes_limit is None and items_limit is None and age_limit is None:
            # No limitation to impose, returning
            return

        # Defers the actual limits enforcing to the store backend.
        self.store_backend.enforce_store_limits(
            bytes_limit, items_limit, age_limit
        )

    def eval(self, func, *args, **kwargs):
        """ Eval function func with arguments `*args` and `**kwargs`,
            in the context of the memory.

            This method works similarly to the builtin `apply`, except
            that the function is called only if the cache is not
            up to date.

        """
        if self.store_backend is None:
            return func(*args, **kwargs)
        return self.cache(func)(*args, **kwargs)

    # ------------------------------------------------------------------------
    # Private `object` interface
    # ------------------------------------------------------------------------

    def __repr__(self):
        return '{class_name}(location={location})'.format(
            class_name=self.__class__.__name__,
            location=(None if self.store_backend is None
                      else self.store_backend.location))

    def __getstate__(self):
        """ We don't store the timestamp when pickling, to avoid the hash
            depending from it.
        """
        state = self.__dict__.copy()
        state['timestamp'] = None
        return state
|
| 1148 |
+
|
| 1149 |
+
|
| 1150 |
+
###############################################################################
|
| 1151 |
+
# cache_validation_callback helpers
|
| 1152 |
+
###############################################################################
|
| 1153 |
+
|
| 1154 |
+
def expires_after(days=0, seconds=0, microseconds=0, milliseconds=0, minutes=0,
                  hours=0, weeks=0):
    """Build a ``cache_validation_callback`` that expires entries by age.

    Parameters
    ----------
    days, seconds, microseconds, milliseconds, minutes, hours, weeks: numbers
        argument passed to a timedelta.
    """
    # Resolve the requested duration once, at decoration time.
    max_age = datetime.timedelta(
        days=days, seconds=seconds, microseconds=microseconds,
        milliseconds=milliseconds, minutes=minutes, hours=hours,
        weeks=weeks).total_seconds()

    def cache_validation_callback(metadata):
        # Valid while the cached result is younger than the duration.
        return time.time() - metadata['time'] < max_age

    return cache_validation_callback
|