Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- llava_video/lib/python3.10/site-packages/jinja2/__pycache__/async_utils.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/jinja2/__pycache__/debug.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/jinja2/__pycache__/environment.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/jinja2/__pycache__/ext.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/jinja2/__pycache__/idtracking.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/jinja2/__pycache__/nativetypes.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/jinja2/__pycache__/nodes.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/jinja2/__pycache__/runtime.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/jinja2/__pycache__/sandbox.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/jinja2/__pycache__/tests.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/jinja2/__pycache__/utils.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/jinja2/__pycache__/visitor.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/__init__.py +0 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/api_tests.txt +424 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/tests/__init__.py +0 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/tests/__pycache__/test_integration_zope_interface.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/tests/__pycache__/test_markers.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/tests/data/my-test-package-source/__pycache__/setup.cpython-310.pyc +0 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/tests/data/my-test-package-source/setup.py +7 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt +7 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/dependency_links.txt +1 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe +1 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg +0 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/tests/test_integration_zope_interface.py +54 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/tests/test_markers.py +8 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/tests/test_pkg_resources.py +427 -0
- llava_video/lib/python3.10/site-packages/pkg_resources/tests/test_resources.py +869 -0
- llava_video/lib/python3.10/site-packages/pybind11/__init__.py +19 -0
- llava_video/lib/python3.10/site-packages/pybind11/__main__.py +86 -0
- llava_video/lib/python3.10/site-packages/pybind11/commands.py +39 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_add_relu.h +63 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_amp_update_scale_cpu_dispatch.h +23 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_backward_cuda_dispatch.h +23 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_fused_dropout_cuda_dispatch.h +23 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_histogramdd_bin_edges_native.h +22 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_mps_convolution_native.h +21 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_native_batch_norm_legit_no_training_native.h +22 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_safe_softmax.h +30 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_flash_attention_for_cpu_backward_ops.h +28 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_flash_attention_for_cpu_ops.h +28 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_test_functorch_fallback_ops.h +39 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_transform_bias_rescale_qkv.h +39 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_validate_sparse_compressed_tensor_args_ops.h +28 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/bitwise_or_compositeexplicitautogradnonfunctional_dispatch.h +24 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/ceil.h +44 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/conv_depthwise3d_compositeexplicitautograd_dispatch.h +26 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/expand_as_native.h +21 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/eye_meta_dispatch.h +30 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/fft_fftshift_native.h +21 -0
- pllava/lib/python3.10/site-packages/torch/include/ATen/ops/fix_compositeimplicitautograd_dispatch.h +26 -0
llava_video/lib/python3.10/site-packages/jinja2/__pycache__/async_utils.cpython-310.pyc
ADDED
|
Binary file (3.44 kB). View file
|
|
|
llava_video/lib/python3.10/site-packages/jinja2/__pycache__/debug.cpython-310.pyc
ADDED
|
Binary file (3.98 kB). View file
|
|
|
llava_video/lib/python3.10/site-packages/jinja2/__pycache__/environment.cpython-310.pyc
ADDED
|
Binary file (53.4 kB). View file
|
|
|
llava_video/lib/python3.10/site-packages/jinja2/__pycache__/ext.cpython-310.pyc
ADDED
|
Binary file (25.9 kB). View file
|
|
|
llava_video/lib/python3.10/site-packages/jinja2/__pycache__/idtracking.cpython-310.pyc
ADDED
|
Binary file (11.1 kB). View file
|
|
|
llava_video/lib/python3.10/site-packages/jinja2/__pycache__/nativetypes.cpython-310.pyc
ADDED
|
Binary file (5 kB). View file
|
|
|
llava_video/lib/python3.10/site-packages/jinja2/__pycache__/nodes.cpython-310.pyc
ADDED
|
Binary file (40.3 kB). View file
|
|
|
llava_video/lib/python3.10/site-packages/jinja2/__pycache__/runtime.cpython-310.pyc
ADDED
|
Binary file (32.3 kB). View file
|
|
|
llava_video/lib/python3.10/site-packages/jinja2/__pycache__/sandbox.cpython-310.pyc
ADDED
|
Binary file (12.2 kB). View file
|
|
|
llava_video/lib/python3.10/site-packages/jinja2/__pycache__/tests.cpython-310.pyc
ADDED
|
Binary file (6.69 kB). View file
|
|
|
llava_video/lib/python3.10/site-packages/jinja2/__pycache__/utils.cpython-310.pyc
ADDED
|
Binary file (24.8 kB). View file
|
|
|
llava_video/lib/python3.10/site-packages/jinja2/__pycache__/visitor.cpython-310.pyc
ADDED
|
Binary file (3.97 kB). View file
|
|
|
llava_video/lib/python3.10/site-packages/pkg_resources/__init__.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
llava_video/lib/python3.10/site-packages/pkg_resources/api_tests.txt
ADDED
|
@@ -0,0 +1,424 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Pluggable Distributions of Python Software
|
| 2 |
+
==========================================
|
| 3 |
+
|
| 4 |
+
Distributions
|
| 5 |
+
-------------
|
| 6 |
+
|
| 7 |
+
A "Distribution" is a collection of files that represent a "Release" of a
|
| 8 |
+
"Project" as of a particular point in time, denoted by a
|
| 9 |
+
"Version"::
|
| 10 |
+
|
| 11 |
+
>>> import sys, pkg_resources
|
| 12 |
+
>>> from pkg_resources import Distribution
|
| 13 |
+
>>> Distribution(project_name="Foo", version="1.2")
|
| 14 |
+
Foo 1.2
|
| 15 |
+
|
| 16 |
+
Distributions have a location, which can be a filename, URL, or really anything
|
| 17 |
+
else you care to use::
|
| 18 |
+
|
| 19 |
+
>>> dist = Distribution(
|
| 20 |
+
... location="http://example.com/something",
|
| 21 |
+
... project_name="Bar", version="0.9"
|
| 22 |
+
... )
|
| 23 |
+
|
| 24 |
+
>>> dist
|
| 25 |
+
Bar 0.9 (http://example.com/something)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
Distributions have various introspectable attributes::
|
| 29 |
+
|
| 30 |
+
>>> dist.location
|
| 31 |
+
'http://example.com/something'
|
| 32 |
+
|
| 33 |
+
>>> dist.project_name
|
| 34 |
+
'Bar'
|
| 35 |
+
|
| 36 |
+
>>> dist.version
|
| 37 |
+
'0.9'
|
| 38 |
+
|
| 39 |
+
>>> dist.py_version == '{}.{}'.format(*sys.version_info)
|
| 40 |
+
True
|
| 41 |
+
|
| 42 |
+
>>> print(dist.platform)
|
| 43 |
+
None
|
| 44 |
+
|
| 45 |
+
Including various computed attributes::
|
| 46 |
+
|
| 47 |
+
>>> from pkg_resources import parse_version
|
| 48 |
+
>>> dist.parsed_version == parse_version(dist.version)
|
| 49 |
+
True
|
| 50 |
+
|
| 51 |
+
>>> dist.key # case-insensitive form of the project name
|
| 52 |
+
'bar'
|
| 53 |
+
|
| 54 |
+
Distributions are compared (and hashed) by version first::
|
| 55 |
+
|
| 56 |
+
>>> Distribution(version='1.0') == Distribution(version='1.0')
|
| 57 |
+
True
|
| 58 |
+
>>> Distribution(version='1.0') == Distribution(version='1.1')
|
| 59 |
+
False
|
| 60 |
+
>>> Distribution(version='1.0') < Distribution(version='1.1')
|
| 61 |
+
True
|
| 62 |
+
|
| 63 |
+
but also by project name (case-insensitive), platform, Python version,
|
| 64 |
+
location, etc.::
|
| 65 |
+
|
| 66 |
+
>>> Distribution(project_name="Foo",version="1.0") == \
|
| 67 |
+
... Distribution(project_name="Foo",version="1.0")
|
| 68 |
+
True
|
| 69 |
+
|
| 70 |
+
>>> Distribution(project_name="Foo",version="1.0") == \
|
| 71 |
+
... Distribution(project_name="foo",version="1.0")
|
| 72 |
+
True
|
| 73 |
+
|
| 74 |
+
>>> Distribution(project_name="Foo",version="1.0") == \
|
| 75 |
+
... Distribution(project_name="Foo",version="1.1")
|
| 76 |
+
False
|
| 77 |
+
|
| 78 |
+
>>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \
|
| 79 |
+
... Distribution(project_name="Foo",py_version="2.4",version="1.0")
|
| 80 |
+
False
|
| 81 |
+
|
| 82 |
+
>>> Distribution(location="spam",version="1.0") == \
|
| 83 |
+
... Distribution(location="spam",version="1.0")
|
| 84 |
+
True
|
| 85 |
+
|
| 86 |
+
>>> Distribution(location="spam",version="1.0") == \
|
| 87 |
+
... Distribution(location="baz",version="1.0")
|
| 88 |
+
False
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
Hash and compare distribution by prio/plat
|
| 93 |
+
|
| 94 |
+
Get version from metadata
|
| 95 |
+
provider capabilities
|
| 96 |
+
egg_name()
|
| 97 |
+
as_requirement()
|
| 98 |
+
from_location, from_filename (w/path normalization)
|
| 99 |
+
|
| 100 |
+
Releases may have zero or more "Requirements", which indicate
|
| 101 |
+
what releases of another project the release requires in order to
|
| 102 |
+
function. A Requirement names the other project, expresses some criteria
|
| 103 |
+
as to what releases of that project are acceptable, and lists any "Extras"
|
| 104 |
+
that the requiring release may need from that project. (An Extra is an
|
| 105 |
+
optional feature of a Release, that can only be used if its additional
|
| 106 |
+
Requirements are satisfied.)
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
The Working Set
|
| 111 |
+
---------------
|
| 112 |
+
|
| 113 |
+
A collection of active distributions is called a Working Set. Note that a
|
| 114 |
+
Working Set can contain any importable distribution, not just pluggable ones.
|
| 115 |
+
For example, the Python standard library is an importable distribution that
|
| 116 |
+
will usually be part of the Working Set, even though it is not pluggable.
|
| 117 |
+
Similarly, when you are doing development work on a project, the files you are
|
| 118 |
+
editing are also a Distribution. (And, with a little attention to the
|
| 119 |
+
directory names used, and including some additional metadata, such a
|
| 120 |
+
"development distribution" can be made pluggable as well.)
|
| 121 |
+
|
| 122 |
+
>>> from pkg_resources import WorkingSet
|
| 123 |
+
|
| 124 |
+
A working set's entries are the sys.path entries that correspond to the active
|
| 125 |
+
distributions. By default, the working set's entries are the items on
|
| 126 |
+
``sys.path``::
|
| 127 |
+
|
| 128 |
+
>>> ws = WorkingSet()
|
| 129 |
+
>>> ws.entries == sys.path
|
| 130 |
+
True
|
| 131 |
+
|
| 132 |
+
But you can also create an empty working set explicitly, and add distributions
|
| 133 |
+
to it::
|
| 134 |
+
|
| 135 |
+
>>> ws = WorkingSet([])
|
| 136 |
+
>>> ws.add(dist)
|
| 137 |
+
>>> ws.entries
|
| 138 |
+
['http://example.com/something']
|
| 139 |
+
>>> dist in ws
|
| 140 |
+
True
|
| 141 |
+
>>> Distribution('foo',version="") in ws
|
| 142 |
+
False
|
| 143 |
+
|
| 144 |
+
And you can iterate over its distributions::
|
| 145 |
+
|
| 146 |
+
>>> list(ws)
|
| 147 |
+
[Bar 0.9 (http://example.com/something)]
|
| 148 |
+
|
| 149 |
+
Adding the same distribution more than once is a no-op::
|
| 150 |
+
|
| 151 |
+
>>> ws.add(dist)
|
| 152 |
+
>>> list(ws)
|
| 153 |
+
[Bar 0.9 (http://example.com/something)]
|
| 154 |
+
|
| 155 |
+
For that matter, adding multiple distributions for the same project also does
|
| 156 |
+
nothing, because a working set can only hold one active distribution per
|
| 157 |
+
project -- the first one added to it::
|
| 158 |
+
|
| 159 |
+
>>> ws.add(
|
| 160 |
+
... Distribution(
|
| 161 |
+
... 'http://example.com/something', project_name="Bar",
|
| 162 |
+
... version="7.2"
|
| 163 |
+
... )
|
| 164 |
+
... )
|
| 165 |
+
>>> list(ws)
|
| 166 |
+
[Bar 0.9 (http://example.com/something)]
|
| 167 |
+
|
| 168 |
+
You can append a path entry to a working set using ``add_entry()``::
|
| 169 |
+
|
| 170 |
+
>>> ws.entries
|
| 171 |
+
['http://example.com/something']
|
| 172 |
+
>>> ws.add_entry(pkg_resources.__file__)
|
| 173 |
+
>>> ws.entries
|
| 174 |
+
['http://example.com/something', '...pkg_resources...']
|
| 175 |
+
|
| 176 |
+
Multiple additions result in multiple entries, even if the entry is already in
|
| 177 |
+
the working set (because ``sys.path`` can contain the same entry more than
|
| 178 |
+
once)::
|
| 179 |
+
|
| 180 |
+
>>> ws.add_entry(pkg_resources.__file__)
|
| 181 |
+
>>> ws.entries
|
| 182 |
+
['...example.com...', '...pkg_resources...', '...pkg_resources...']
|
| 183 |
+
|
| 184 |
+
And you can specify the path entry a distribution was found under, using the
|
| 185 |
+
optional second parameter to ``add()``::
|
| 186 |
+
|
| 187 |
+
>>> ws = WorkingSet([])
|
| 188 |
+
>>> ws.add(dist,"foo")
|
| 189 |
+
>>> ws.entries
|
| 190 |
+
['foo']
|
| 191 |
+
|
| 192 |
+
But even if a distribution is found under multiple path entries, it still only
|
| 193 |
+
shows up once when iterating the working set:
|
| 194 |
+
|
| 195 |
+
>>> ws.add_entry(ws.entries[0])
|
| 196 |
+
>>> list(ws)
|
| 197 |
+
[Bar 0.9 (http://example.com/something)]
|
| 198 |
+
|
| 199 |
+
You can ask a WorkingSet to ``find()`` a distribution matching a requirement::
|
| 200 |
+
|
| 201 |
+
>>> from pkg_resources import Requirement
|
| 202 |
+
>>> print(ws.find(Requirement.parse("Foo==1.0"))) # no match, return None
|
| 203 |
+
None
|
| 204 |
+
|
| 205 |
+
>>> ws.find(Requirement.parse("Bar==0.9")) # match, return distribution
|
| 206 |
+
Bar 0.9 (http://example.com/something)
|
| 207 |
+
|
| 208 |
+
Note that asking for a conflicting version of a distribution already in a
|
| 209 |
+
working set triggers a ``pkg_resources.VersionConflict`` error:
|
| 210 |
+
|
| 211 |
+
>>> try:
|
| 212 |
+
... ws.find(Requirement.parse("Bar==1.0"))
|
| 213 |
+
... except pkg_resources.VersionConflict as exc:
|
| 214 |
+
... print(str(exc))
|
| 215 |
+
... else:
|
| 216 |
+
... raise AssertionError("VersionConflict was not raised")
|
| 217 |
+
(Bar 0.9 (http://example.com/something), Requirement.parse('Bar==1.0'))
|
| 218 |
+
|
| 219 |
+
You can subscribe a callback function to receive notifications whenever a new
|
| 220 |
+
distribution is added to a working set. The callback is immediately invoked
|
| 221 |
+
once for each existing distribution in the working set, and then is called
|
| 222 |
+
again for new distributions added thereafter::
|
| 223 |
+
|
| 224 |
+
>>> def added(dist): print("Added %s" % dist)
|
| 225 |
+
>>> ws.subscribe(added)
|
| 226 |
+
Added Bar 0.9
|
| 227 |
+
>>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12")
|
| 228 |
+
>>> ws.add(foo12)
|
| 229 |
+
Added Foo 1.2
|
| 230 |
+
|
| 231 |
+
Note, however, that only the first distribution added for a given project name
|
| 232 |
+
will trigger a callback, even during the initial ``subscribe()`` callback::
|
| 233 |
+
|
| 234 |
+
>>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14")
|
| 235 |
+
>>> ws.add(foo14) # no callback, because Foo 1.2 is already active
|
| 236 |
+
|
| 237 |
+
>>> ws = WorkingSet([])
|
| 238 |
+
>>> ws.add(foo12)
|
| 239 |
+
>>> ws.add(foo14)
|
| 240 |
+
>>> ws.subscribe(added)
|
| 241 |
+
Added Foo 1.2
|
| 242 |
+
|
| 243 |
+
And adding a callback more than once has no effect, either::
|
| 244 |
+
|
| 245 |
+
>>> ws.subscribe(added) # no callbacks
|
| 246 |
+
|
| 247 |
+
# and no double-callbacks on subsequent additions, either
|
| 248 |
+
>>> just_a_test = Distribution(project_name="JustATest", version="0.99")
|
| 249 |
+
>>> ws.add(just_a_test)
|
| 250 |
+
Added JustATest 0.99
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
Finding Plugins
|
| 254 |
+
---------------
|
| 255 |
+
|
| 256 |
+
``WorkingSet`` objects can be used to figure out what plugins in an
|
| 257 |
+
``Environment`` can be loaded without any resolution errors::
|
| 258 |
+
|
| 259 |
+
>>> from pkg_resources import Environment
|
| 260 |
+
|
| 261 |
+
>>> plugins = Environment([]) # normally, a list of plugin directories
|
| 262 |
+
>>> plugins.add(foo12)
|
| 263 |
+
>>> plugins.add(foo14)
|
| 264 |
+
>>> plugins.add(just_a_test)
|
| 265 |
+
|
| 266 |
+
In the simplest case, we just get the newest version of each distribution in
|
| 267 |
+
the plugin environment::
|
| 268 |
+
|
| 269 |
+
>>> ws = WorkingSet([])
|
| 270 |
+
>>> ws.find_plugins(plugins)
|
| 271 |
+
([JustATest 0.99, Foo 1.4 (f14)], {})
|
| 272 |
+
|
| 273 |
+
But if there's a problem with a version conflict or missing requirements, the
|
| 274 |
+
method falls back to older versions, and the error info dict will contain an
|
| 275 |
+
exception instance for each unloadable plugin::
|
| 276 |
+
|
| 277 |
+
>>> ws.add(foo12) # this will conflict with Foo 1.4
|
| 278 |
+
>>> ws.find_plugins(plugins)
|
| 279 |
+
([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)})
|
| 280 |
+
|
| 281 |
+
But if you disallow fallbacks, the failed plugin will be skipped instead of
|
| 282 |
+
trying older versions::
|
| 283 |
+
|
| 284 |
+
>>> ws.find_plugins(plugins, fallback=False)
|
| 285 |
+
([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)})
|
| 286 |
+
|
| 287 |
+
|
| 288 |
+
|
| 289 |
+
Platform Compatibility Rules
|
| 290 |
+
----------------------------
|
| 291 |
+
|
| 292 |
+
On the Mac, there are potential compatibility issues for modules compiled
|
| 293 |
+
on newer versions of macOS than what the user is running. Additionally,
|
| 294 |
+
macOS will soon have two platforms to contend with: Intel and PowerPC.
|
| 295 |
+
|
| 296 |
+
Basic equality works as on other platforms::
|
| 297 |
+
|
| 298 |
+
>>> from pkg_resources import compatible_platforms as cp
|
| 299 |
+
>>> reqd = 'macosx-10.4-ppc'
|
| 300 |
+
>>> cp(reqd, reqd)
|
| 301 |
+
True
|
| 302 |
+
>>> cp("win32", reqd)
|
| 303 |
+
False
|
| 304 |
+
|
| 305 |
+
Distributions made on other machine types are not compatible::
|
| 306 |
+
|
| 307 |
+
>>> cp("macosx-10.4-i386", reqd)
|
| 308 |
+
False
|
| 309 |
+
|
| 310 |
+
Distributions made on earlier versions of the OS are compatible, as
|
| 311 |
+
long as they are from the same top-level version. The patchlevel version
|
| 312 |
+
number does not matter::
|
| 313 |
+
|
| 314 |
+
>>> cp("macosx-10.4-ppc", reqd)
|
| 315 |
+
True
|
| 316 |
+
>>> cp("macosx-10.3-ppc", reqd)
|
| 317 |
+
True
|
| 318 |
+
>>> cp("macosx-10.5-ppc", reqd)
|
| 319 |
+
False
|
| 320 |
+
>>> cp("macosx-9.5-ppc", reqd)
|
| 321 |
+
False
|
| 322 |
+
|
| 323 |
+
Backwards compatibility for packages made via earlier versions of
|
| 324 |
+
setuptools is provided as well::
|
| 325 |
+
|
| 326 |
+
>>> cp("darwin-8.2.0-Power_Macintosh", reqd)
|
| 327 |
+
True
|
| 328 |
+
>>> cp("darwin-7.2.0-Power_Macintosh", reqd)
|
| 329 |
+
True
|
| 330 |
+
>>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc")
|
| 331 |
+
False
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
Environment Markers
|
| 335 |
+
-------------------
|
| 336 |
+
|
| 337 |
+
>>> from pkg_resources import invalid_marker as im, evaluate_marker as em
|
| 338 |
+
>>> import os
|
| 339 |
+
|
| 340 |
+
>>> print(im("sys_platform"))
|
| 341 |
+
Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in
|
| 342 |
+
sys_platform
|
| 343 |
+
^
|
| 344 |
+
|
| 345 |
+
>>> print(im("sys_platform=="))
|
| 346 |
+
Expected a marker variable or quoted string
|
| 347 |
+
sys_platform==
|
| 348 |
+
^
|
| 349 |
+
|
| 350 |
+
>>> print(im("sys_platform=='win32'"))
|
| 351 |
+
False
|
| 352 |
+
|
| 353 |
+
>>> print(im("sys=='x'"))
|
| 354 |
+
Expected a marker variable or quoted string
|
| 355 |
+
sys=='x'
|
| 356 |
+
^
|
| 357 |
+
|
| 358 |
+
>>> print(im("(extra)"))
|
| 359 |
+
Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in
|
| 360 |
+
(extra)
|
| 361 |
+
^
|
| 362 |
+
|
| 363 |
+
>>> print(im("(extra"))
|
| 364 |
+
Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in
|
| 365 |
+
(extra
|
| 366 |
+
^
|
| 367 |
+
|
| 368 |
+
>>> print(im("os.open('foo')=='y'"))
|
| 369 |
+
Expected a marker variable or quoted string
|
| 370 |
+
os.open('foo')=='y'
|
| 371 |
+
^
|
| 372 |
+
|
| 373 |
+
>>> print(im("'x'=='y' and os.open('foo')=='y'")) # no short-circuit!
|
| 374 |
+
Expected a marker variable or quoted string
|
| 375 |
+
'x'=='y' and os.open('foo')=='y'
|
| 376 |
+
^
|
| 377 |
+
|
| 378 |
+
>>> print(im("'x'=='x' or os.open('foo')=='y'")) # no short-circuit!
|
| 379 |
+
Expected a marker variable or quoted string
|
| 380 |
+
'x'=='x' or os.open('foo')=='y'
|
| 381 |
+
^
|
| 382 |
+
|
| 383 |
+
>>> print(im("r'x'=='x'"))
|
| 384 |
+
Expected a marker variable or quoted string
|
| 385 |
+
r'x'=='x'
|
| 386 |
+
^
|
| 387 |
+
|
| 388 |
+
>>> print(im("'''x'''=='x'"))
|
| 389 |
+
Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in
|
| 390 |
+
'''x'''=='x'
|
| 391 |
+
^
|
| 392 |
+
|
| 393 |
+
>>> print(im('"""x"""=="x"'))
|
| 394 |
+
Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in
|
| 395 |
+
"""x"""=="x"
|
| 396 |
+
^
|
| 397 |
+
|
| 398 |
+
>>> print(im(r"x\n=='x'"))
|
| 399 |
+
Expected a marker variable or quoted string
|
| 400 |
+
x\n=='x'
|
| 401 |
+
^
|
| 402 |
+
|
| 403 |
+
>>> print(im("os.open=='y'"))
|
| 404 |
+
Expected a marker variable or quoted string
|
| 405 |
+
os.open=='y'
|
| 406 |
+
^
|
| 407 |
+
|
| 408 |
+
>>> em("sys_platform=='win32'") == (sys.platform=='win32')
|
| 409 |
+
True
|
| 410 |
+
|
| 411 |
+
>>> em("python_version >= '2.7'")
|
| 412 |
+
True
|
| 413 |
+
|
| 414 |
+
>>> em("python_version > '2.6'")
|
| 415 |
+
True
|
| 416 |
+
|
| 417 |
+
>>> im("implementation_name=='cpython'")
|
| 418 |
+
False
|
| 419 |
+
|
| 420 |
+
>>> im("platform_python_implementation=='CPython'")
|
| 421 |
+
False
|
| 422 |
+
|
| 423 |
+
>>> im("implementation_version=='3.5.1'")
|
| 424 |
+
False
|
llava_video/lib/python3.10/site-packages/pkg_resources/tests/__init__.py
ADDED
|
File without changes
|
llava_video/lib/python3.10/site-packages/pkg_resources/tests/__pycache__/test_integration_zope_interface.cpython-310.pyc
ADDED
|
Binary file (1.56 kB). View file
|
|
|
llava_video/lib/python3.10/site-packages/pkg_resources/tests/__pycache__/test_markers.cpython-310.pyc
ADDED
|
Binary file (493 Bytes). View file
|
|
|
llava_video/lib/python3.10/site-packages/pkg_resources/tests/data/my-test-package-source/__pycache__/setup.cpython-310.pyc
ADDED
|
Binary file (283 Bytes). View file
|
|
|
llava_video/lib/python3.10/site-packages/pkg_resources/tests/data/my-test-package-source/setup.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import setuptools
|
| 2 |
+
|
| 3 |
+
setuptools.setup(
|
| 4 |
+
name="my-test-package",
|
| 5 |
+
version="1.0",
|
| 6 |
+
zip_safe=True,
|
| 7 |
+
)
|
llava_video/lib/python3.10/site-packages/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/SOURCES.txt
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
setup.cfg
|
| 2 |
+
setup.py
|
| 3 |
+
my_test_package.egg-info/PKG-INFO
|
| 4 |
+
my_test_package.egg-info/SOURCES.txt
|
| 5 |
+
my_test_package.egg-info/dependency_links.txt
|
| 6 |
+
my_test_package.egg-info/top_level.txt
|
| 7 |
+
my_test_package.egg-info/zip-safe
|
llava_video/lib/python3.10/site-packages/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/dependency_links.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
|
llava_video/lib/python3.10/site-packages/pkg_resources/tests/data/my-test-package_unpacked-egg/my_test_package-1.0-py3.7.egg/EGG-INFO/zip-safe
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
|
llava_video/lib/python3.10/site-packages/pkg_resources/tests/data/my-test-package_zipped-egg/my_test_package-1.0-py3.7.egg
ADDED
|
Binary file (843 Bytes). View file
|
|
|
llava_video/lib/python3.10/site-packages/pkg_resources/tests/test_integration_zope_interface.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import platform
|
| 2 |
+
from inspect import cleandoc
|
| 3 |
+
|
| 4 |
+
import jaraco.path
|
| 5 |
+
import pytest
|
| 6 |
+
|
| 7 |
+
pytestmark = pytest.mark.integration
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
# For the sake of simplicity this test uses fixtures defined in
|
| 11 |
+
# `setuptools.test.fixtures`,
|
| 12 |
+
# and it also exercise conditions considered deprecated...
|
| 13 |
+
# So if needed this test can be deleted.
|
| 14 |
+
@pytest.mark.skipif(
|
| 15 |
+
platform.system() != "Linux",
|
| 16 |
+
reason="only demonstrated to fail on Linux in #4399",
|
| 17 |
+
)
|
| 18 |
+
def test_interop_pkg_resources_iter_entry_points(tmp_path, venv):
|
| 19 |
+
"""
|
| 20 |
+
Importing pkg_resources.iter_entry_points on console_scripts
|
| 21 |
+
seems to cause trouble with zope-interface, when deprecates installation method
|
| 22 |
+
is used. See #4399.
|
| 23 |
+
"""
|
| 24 |
+
project = {
|
| 25 |
+
"pkg": {
|
| 26 |
+
"foo.py": cleandoc(
|
| 27 |
+
"""
|
| 28 |
+
from pkg_resources import iter_entry_points
|
| 29 |
+
|
| 30 |
+
def bar():
|
| 31 |
+
print("Print me if you can")
|
| 32 |
+
"""
|
| 33 |
+
),
|
| 34 |
+
"setup.py": cleandoc(
|
| 35 |
+
"""
|
| 36 |
+
from setuptools import setup, find_packages
|
| 37 |
+
|
| 38 |
+
setup(
|
| 39 |
+
install_requires=["zope-interface==6.4.post2"],
|
| 40 |
+
entry_points={
|
| 41 |
+
"console_scripts": [
|
| 42 |
+
"foo=foo:bar",
|
| 43 |
+
],
|
| 44 |
+
},
|
| 45 |
+
)
|
| 46 |
+
"""
|
| 47 |
+
),
|
| 48 |
+
}
|
| 49 |
+
}
|
| 50 |
+
jaraco.path.build(project, prefix=tmp_path)
|
| 51 |
+
cmd = ["pip", "install", "-e", ".", "--no-use-pep517"]
|
| 52 |
+
venv.run(cmd, cwd=tmp_path / "pkg") # Needs this version of pkg_resources installed
|
| 53 |
+
out = venv.run(["foo"])
|
| 54 |
+
assert "Print me if you can" in out
|
llava_video/lib/python3.10/site-packages/pkg_resources/tests/test_markers.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from unittest import mock
|
| 2 |
+
|
| 3 |
+
from pkg_resources import evaluate_marker
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
@mock.patch('platform.python_version', return_value='2.7.10')
|
| 7 |
+
def test_ordering(python_version_mock):
|
| 8 |
+
assert evaluate_marker("python_full_version > '2.7.3'") is True
|
llava_video/lib/python3.10/site-packages/pkg_resources/tests/test_pkg_resources.py
ADDED
|
@@ -0,0 +1,427 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import builtins
|
| 4 |
+
import datetime
|
| 5 |
+
import os
|
| 6 |
+
import plistlib
|
| 7 |
+
import stat
|
| 8 |
+
import subprocess
|
| 9 |
+
import sys
|
| 10 |
+
import tempfile
|
| 11 |
+
import zipfile
|
| 12 |
+
from unittest import mock
|
| 13 |
+
|
| 14 |
+
import pytest
|
| 15 |
+
|
| 16 |
+
import pkg_resources
|
| 17 |
+
from pkg_resources import DistInfoDistribution, Distribution, EggInfoDistribution
|
| 18 |
+
|
| 19 |
+
import distutils.command.install_egg_info
|
| 20 |
+
import distutils.dist
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class EggRemover(str):
    """A path (str subclass) that, when called, removes itself both from
    sys.path and from the filesystem.  Used as a test finalizer."""

    def __call__(self):
        # Drop the import-path entry first (no-op if it is not present),
        # then delete the backing file if it exists.
        try:
            sys.path.remove(self)
        except ValueError:
            pass  # path was not on sys.path
        if os.path.exists(self):
            os.remove(self)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class TestZipProvider:
    """Exercise pkg_resources.ZipProvider against a temporary zip egg."""

    # Cleanup callables run by teardown_class (class-level, shared by design).
    finalizers: list[EggRemover] = []

    ref_time = datetime.datetime(2013, 5, 12, 13, 25, 0)
    "A reference time for a file modification"

    @classmethod
    def setup_class(cls):
        "create a zip egg and add it to sys.path"
        egg = tempfile.NamedTemporaryFile(suffix='.egg', delete=False)
        zip_egg = zipfile.ZipFile(egg, 'w')
        # Each entry gets the fixed ref_time so mtime-based checks are stable.
        zip_info = zipfile.ZipInfo()
        zip_info.filename = 'mod.py'
        zip_info.date_time = cls.ref_time.timetuple()
        zip_egg.writestr(zip_info, 'x = 3\n')
        zip_info = zipfile.ZipInfo()
        zip_info.filename = 'data.dat'
        zip_info.date_time = cls.ref_time.timetuple()
        zip_egg.writestr(zip_info, 'hello, world!')
        zip_info = zipfile.ZipInfo()
        zip_info.filename = 'subdir/mod2.py'
        zip_info.date_time = cls.ref_time.timetuple()
        zip_egg.writestr(zip_info, 'x = 6\n')
        zip_info = zipfile.ZipInfo()
        zip_info.filename = 'subdir/data2.dat'
        zip_info.date_time = cls.ref_time.timetuple()
        zip_egg.writestr(zip_info, 'goodbye, world!')
        zip_egg.close()
        egg.close()

        # Both the egg root and its subdir become importable zip paths,
        # so `import mod` and `import mod2` below resolve from the egg.
        sys.path.append(egg.name)
        subdir = os.path.join(egg.name, 'subdir')
        sys.path.append(subdir)
        cls.finalizers.append(EggRemover(subdir))
        cls.finalizers.append(EggRemover(egg.name))

    @classmethod
    def teardown_class(cls):
        # Undo the sys.path additions and delete the temporary egg file.
        for finalizer in cls.finalizers:
            finalizer()

    def test_resource_listdir(self):
        import mod  # pyright: ignore[reportMissingImports] # Temporary package for test

        zp = pkg_resources.ZipProvider(mod)

        # Listing from the egg root: files plus the subdirectory entry.
        expected_root = ['data.dat', 'mod.py', 'subdir']
        assert sorted(zp.resource_listdir('')) == expected_root

        # Trailing slash must not change the listing.
        expected_subdir = ['data2.dat', 'mod2.py']
        assert sorted(zp.resource_listdir('subdir')) == expected_subdir
        assert sorted(zp.resource_listdir('subdir/')) == expected_subdir

        # Unknown directories yield an empty list, not an error.
        assert zp.resource_listdir('nonexistent') == []
        assert zp.resource_listdir('nonexistent/') == []

        import mod2  # pyright: ignore[reportMissingImports] # Temporary package for test

        # A provider rooted at the subdir sees the subdir contents as its root.
        zp2 = pkg_resources.ZipProvider(mod2)

        assert sorted(zp2.resource_listdir('')) == expected_subdir

        assert zp2.resource_listdir('subdir') == []
        assert zp2.resource_listdir('subdir/') == []

    def test_resource_filename_rewrites_on_change(self):
        """
        If a previous call to get_resource_filename has extracted the file,
        but the file has subsequently been mutated in place with different
        content of the same size and modification time, a later call to
        get_resource_filename re-extracts the file, restoring the original
        zip contents (as the final assertion below demonstrates).
        """
        import mod  # pyright: ignore[reportMissingImports] # Temporary package for test

        manager = pkg_resources.ResourceManager()
        zp = pkg_resources.ZipProvider(mod)
        filename = zp.get_resource_filename(manager, 'data.dat')
        # Extraction preserves the archive's recorded modification time.
        actual = datetime.datetime.fromtimestamp(os.stat(filename).st_mtime)
        assert actual == self.ref_time
        # Mutate the extracted copy with same-length content...
        f = open(filename, 'w', encoding="utf-8")
        f.write('hello, world?')
        f.close()
        # ...and restore the original mtime so size and timestamp both match.
        ts = self.ref_time.timestamp()
        os.utime(filename, (ts, ts))
        filename = zp.get_resource_filename(manager, 'data.dat')
        # The provider detected the content change and re-extracted.
        with open(filename, encoding="utf-8") as f:
            assert f.read() == 'hello, world!'
        manager.cleanup_resources()
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
class TestResourceManager:
    """Tests for pkg_resources.ResourceManager cache-path behavior."""

    def test_get_cache_path(self):
        # get_cache_path must return a plain str, whatever the platform.
        mgr = pkg_resources.ResourceManager()
        path = mgr.get_cache_path('foo')
        type_ = str(type(path))
        message = "Unexpected type from get_cache_path: " + type_
        assert isinstance(path, str), message

    def test_get_cache_path_race(self, tmpdir):
        # Patch to os.path.isdir to create a race condition
        # (another process "creates" the directory between the isdir check
        # and the makedirs call inside get_cache_path).
        def patched_isdir(dirname, unpatched_isdir=pkg_resources.isdir):
            patched_isdir.dirnames.append(dirname)

            was_dir = unpatched_isdir(dirname)
            if not was_dir:
                os.makedirs(dirname)
            return was_dir

        patched_isdir.dirnames = []

        # Get a cache path with a "race condition"
        mgr = pkg_resources.ResourceManager()
        mgr.set_extraction_path(str(tmpdir))

        archive_name = os.sep.join(('foo', 'bar', 'baz'))
        with mock.patch.object(pkg_resources, 'isdir', new=patched_isdir):
            mgr.get_cache_path(archive_name)

        # Because this test relies on the implementation details of this
        # function, these assertions are a sentinel to ensure that the
        # test suite will not fail silently if the implementation changes.
        called_dirnames = patched_isdir.dirnames
        assert len(called_dirnames) == 2
        assert called_dirnames[0].split(os.sep)[-2:] == ['foo', 'bar']
        assert called_dirnames[1].split(os.sep)[-1:] == ['foo']

    # NOTE(review): the bare string below is a stray class-body note (a no-op
    # statement), not a docstring for any particular method.
    """
    Tests to ensure that pkg_resources runs independently from setuptools.
    """

    def test_setuptools_not_imported(self):
        """
        In a separate Python environment, import pkg_resources and assert
        that action doesn't cause setuptools to be imported.
        """
        # Run the check in a fresh interpreter so this process's own imports
        # cannot contaminate sys.modules.
        lines = (
            'import pkg_resources',
            'import sys',
            ('assert "setuptools" not in sys.modules, "setuptools was imported"'),
        )
        cmd = [sys.executable, '-c', '; '.join(lines)]
        subprocess.check_call(cmd)
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def make_test_distribution(metadata_path, metadata):
    """
    Make a test Distribution object, and return it.

    :param metadata_path: the path to the metadata file that should be
        created. This should be inside a distribution directory that should
        also be created. For example, an argument value might end with
        "<project>.dist-info/METADATA".
    :param metadata: the desired contents of the metadata file, as bytes.
    """
    # Create the enclosing distribution directory, then the metadata file.
    dist_dir = os.path.dirname(metadata_path)
    os.mkdir(dist_dir)
    with open(metadata_path, 'wb') as f:
        f.write(metadata)

    # Exactly one distribution must be discoverable from that directory.
    (dist,) = pkg_resources.distributions_from_metadata(dist_dir)
    return dist
|
| 193 |
+
|
| 194 |
+
|
| 195 |
+
def test_get_metadata__bad_utf8(tmpdir):
    """
    Test a metadata file with bytes that can't be decoded as utf-8.
    """
    filename = 'METADATA'
    # tmpdir is a LocalPath; convert to str before os.path.join.
    metadata_path = os.path.join(str(tmpdir), 'foo.dist-info', filename)
    # Encode a non-ascii string with a deliberately wrong (non-utf-8) codec.
    metadata = 'née'.encode('iso-8859-1')
    dist = make_test_distribution(metadata_path, metadata=metadata)

    with pytest.raises(UnicodeDecodeError) as excinfo:
        dist.get_metadata(filename)

    message = str(excinfo.value)
    # The error message starts with "'utf-8' codec ..." but the spelling of
    # "utf-8" can vary (e.g. "utf8"), so that prefix is not asserted.
    expected = (
        "codec can't decode byte 0xe9 in position 1: "
        'invalid continuation byte in METADATA file at path: '
    )
    assert expected in message, f'actual: {message}'
    assert message.endswith(metadata_path), f'actual: {message}'
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
def make_distribution_no_version(tmpdir, basename):
    """
    Create a distribution directory with no file containing the version.
    """
    dist_dir = tmpdir / basename
    dist_dir.ensure_dir()
    # The directory must be non-empty for distributions_from_metadata()
    # to detect it and yield a distribution.
    dist_dir.join('temp.txt').ensure()

    found = list(pkg_resources.distributions_from_metadata(dist_dir))
    assert len(found) == 1
    (dist,) = found

    return dist, dist_dir
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
@pytest.mark.parametrize(
    ("suffix", "expected_filename", "expected_dist_type"),
    [
        ('egg-info', 'PKG-INFO', EggInfoDistribution),
        ('dist-info', 'METADATA', DistInfoDistribution),
    ],
)
@pytest.mark.xfail(
    sys.version_info[:2] == (3, 12) and sys.version_info.releaselevel != 'final',
    reason="https://github.com/python/cpython/issues/103632",
)
def test_distribution_version_missing(
    tmpdir, suffix, expected_filename, expected_dist_type
):
    """
    Test Distribution.version when the "Version" header is missing.

    Covers both metadata layouts: *.egg-info (PKG-INFO) and
    *.dist-info (METADATA).
    """
    basename = f'foo.{suffix}'
    dist, dist_dir = make_distribution_no_version(tmpdir, basename)

    expected_text = (
        f"Missing 'Version:' header and/or {expected_filename} file at path: "
    )
    metadata_path = os.path.join(dist_dir, expected_filename)

    # Now check the exception raised when the "version" attribute is accessed.
    with pytest.raises(ValueError) as excinfo:
        dist.version

    err = str(excinfo.value)
    # Include a string expression after the assert so the full strings
    # will be visible for inspection on failure.
    assert expected_text in err, str((expected_text, err))

    # Also check the args passed to the ValueError.
    # (The ValueError carries (message, distribution) — note `dist` is
    # rebound here to the distribution object from the exception args.)
    msg, dist = excinfo.value.args
    assert expected_text in msg
    # Check that the message portion contains the path.
    assert metadata_path in msg, str((metadata_path, msg))
    assert type(dist) is expected_dist_type
|
| 278 |
+
|
| 279 |
+
|
| 280 |
+
@pytest.mark.xfail(
    sys.version_info[:2] == (3, 12) and sys.version_info.releaselevel != 'final',
    reason="https://github.com/python/cpython/issues/103632",
)
def test_distribution_version_missing_undetected_path():
    """
    Test Distribution.version when the "Version" header is missing and
    the path can't be detected.
    """
    # With no metadata argument the Distribution gets an empty metadata
    # provider, so no path can be reported in the error.
    bare_dist = Distribution('/foo')
    with pytest.raises(ValueError) as excinfo:
        bare_dist.version

    message, _offender = excinfo.value.args
    expected = (
        "Missing 'Version:' header and/or PKG-INFO file at path: [could not detect]"
    )
    assert message == expected
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
@pytest.mark.parametrize('only', [False, True])
def test_dist_info_is_not_dir(tmp_path, only):
    """dist_factory must yield nothing for a *.dist-info entry that is a plain file."""
    bogus = tmp_path / 'foobar.dist-info'
    bogus.touch()
    result = pkg_resources.dist_factory(str(tmp_path), str(bogus), only)
    assert not result
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
def test_macos_vers_fallback(monkeypatch, tmp_path):
    """Regression test for pkg_resources._macos_vers"""
    # Keep a handle on the real open() before it gets patched below.
    orig_open = builtins.open

    # Pretend we need to use the plist file
    # (an empty platform.mac_ver() result forces the plist fallback path).
    monkeypatch.setattr('platform.mac_ver', mock.Mock(return_value=('', (), '')))

    # Create fake content for the fake plist file
    with open(tmp_path / 'fake.plist', 'wb') as fake_file:
        plistlib.dump({"ProductVersion": "11.4"}, fake_file)

    # Pretend the fake file exists
    monkeypatch.setattr('os.path.exists', mock.Mock(return_value=True))

    # Redirect every open() to the fake plist, regardless of requested path.
    def fake_open(file, *args, **kwargs):
        return orig_open(tmp_path / 'fake.plist', *args, **kwargs)

    # Ensure that the _macos_vers works correctly
    # (cache_clear before and after so this test neither reads nor leaves
    # stale cached results — _macos_vers is memoized).
    with mock.patch('builtins.open', mock.Mock(side_effect=fake_open)) as m:
        pkg_resources._macos_vers.cache_clear()
        assert pkg_resources._macos_vers() == ["11", "4"]
        pkg_resources._macos_vers.cache_clear()

    m.assert_called()
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
class TestDeepVersionLookupDistutils:
    """Version resolution from distutils-style .egg-info, plus
    normalize_path behavior on various platforms."""

    @pytest.fixture
    def env(self, tmpdir):
        """
        Create a package environment, similar to a virtualenv,
        in which packages are installed.
        """

        class Environment(str):
            pass

        env = Environment(tmpdir)
        tmpdir.chmod(stat.S_IRWXU)
        # One subdirectory per install location; paths maps name -> str path.
        subs = 'home', 'lib', 'scripts', 'data', 'egg-base'
        env.paths = dict((dirname, str(tmpdir / dirname)) for dirname in subs)
        list(map(os.mkdir, env.paths.values()))
        return env

    def create_foo_pkg(self, env, version):
        """
        Create a foo package installed (distutils-style) to env.paths['lib']
        as version.
        """
        # Non-ascii long_description exercises unicode metadata handling.
        ld = "This package has unicode metadata! ❄"
        attrs = dict(name='foo', version=version, long_description=ld)
        dist = distutils.dist.Distribution(attrs)
        # Drive the install_egg_info command directly to write the .egg-info.
        iei_cmd = distutils.command.install_egg_info.install_egg_info(dist)
        iei_cmd.initialize_options()
        iei_cmd.install_dir = env.paths['lib']
        iei_cmd.finalize_options()
        iei_cmd.run()

    def test_version_resolved_from_egg_info(self, env):
        version = '1.11.0.dev0+2329eae'
        self.create_foo_pkg(env, version)

        # this requirement parsing will raise a VersionConflict unless the
        # .egg-info file is parsed (see #419 on BitBucket)
        req = pkg_resources.Requirement.parse('foo>=1.9')
        dist = pkg_resources.WorkingSet([env.paths['lib']]).find(req)
        assert dist.version == version

    @pytest.mark.parametrize(
        ("unnormalized", "normalized"),
        [
            ('foo', 'foo'),
            ('foo/', 'foo'),
            ('foo/bar', 'foo/bar'),
            ('foo/bar/', 'foo/bar'),
        ],
    )
    def test_normalize_path_trailing_sep(self, unnormalized, normalized):
        """Ensure the trailing slash is cleaned for path comparison.

        See pypa/setuptools#1519.
        """
        result_from_unnormalized = pkg_resources.normalize_path(unnormalized)
        result_from_normalized = pkg_resources.normalize_path(normalized)
        assert result_from_unnormalized == result_from_normalized

    @pytest.mark.skipif(
        os.path.normcase('A') != os.path.normcase('a'),
        reason='Testing case-insensitive filesystems.',
    )
    @pytest.mark.parametrize(
        ("unnormalized", "normalized"),
        [
            ('MiXeD/CasE', 'mixed/case'),
        ],
    )
    def test_normalize_path_normcase(self, unnormalized, normalized):
        """Ensure mixed case is normalized on case-insensitive filesystems."""
        result_from_unnormalized = pkg_resources.normalize_path(unnormalized)
        result_from_normalized = pkg_resources.normalize_path(normalized)
        assert result_from_unnormalized == result_from_normalized

    @pytest.mark.skipif(
        os.path.sep != '\\',
        reason='Testing systems using backslashes as path separators.',
    )
    @pytest.mark.parametrize(
        ("unnormalized", "expected"),
        [
            ('forward/slash', 'forward\\slash'),
            ('forward/slash/', 'forward\\slash'),
            ('backward\\slash\\', 'backward\\slash'),
        ],
    )
    def test_normalize_path_backslash_sep(self, unnormalized, expected):
        """Ensure path seps are cleaned on backslash path sep systems."""
        result = pkg_resources.normalize_path(unnormalized)
        assert result.endswith(expected)
|
llava_video/lib/python3.10/site-packages/pkg_resources/tests/test_resources.py
ADDED
|
@@ -0,0 +1,869 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import itertools
|
| 2 |
+
import os
|
| 3 |
+
import platform
|
| 4 |
+
import string
|
| 5 |
+
import sys
|
| 6 |
+
|
| 7 |
+
import pytest
|
| 8 |
+
from packaging.specifiers import SpecifierSet
|
| 9 |
+
|
| 10 |
+
import pkg_resources
|
| 11 |
+
from pkg_resources import (
|
| 12 |
+
Distribution,
|
| 13 |
+
EntryPoint,
|
| 14 |
+
Requirement,
|
| 15 |
+
VersionConflict,
|
| 16 |
+
WorkingSet,
|
| 17 |
+
parse_requirements,
|
| 18 |
+
parse_version,
|
| 19 |
+
safe_name,
|
| 20 |
+
safe_version,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
# Recipe from the Python 3.6 itertools docs; the stdlib provides
# itertools.pairwise natively on Python 3.10+.
def pairwise(iterable):
    """Return successive overlapping pairs from *iterable*.

    s -> (s0,s1), (s1,s2), (s2, s3), ...

    Delegates to :func:`itertools.pairwise` when available (Python 3.10+);
    otherwise falls back to the documented tee-based recipe.  An iterable
    with fewer than two items yields nothing.
    """
    if hasattr(itertools, 'pairwise'):
        return itertools.pairwise(iterable)
    a, b = itertools.tee(iterable)
    next(b, None)
    return zip(a, b)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class Metadata(pkg_resources.EmptyProvider):
    """Mock object to return metadata as if from an on-disk distribution"""

    def __init__(self, *pairs) -> None:
        # pairs: (name, content) tuples; stored as a name -> content mapping.
        self.metadata = dict(pairs)

    def has_metadata(self, name) -> bool:
        return name in self.metadata

    def get_metadata(self, name):
        # Raises KeyError for names not supplied at construction time.
        return self.metadata[name]

    def get_metadata_lines(self, name):
        # Split the stored metadata text into lines via pkg_resources.yield_lines.
        return pkg_resources.yield_lines(self.get_metadata(name))
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
# Shorthand used throughout these tests: build a Distribution by parsing
# an egg-style filename (e.g. "FooPkg-1.2-py2.4.egg").
dist_from_fn = pkg_resources.Distribution.from_filename
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class TestDistro:
|
| 52 |
+
def testCollection(self):
|
| 53 |
+
# empty path should produce no distributions
|
| 54 |
+
ad = pkg_resources.Environment([], platform=None, python=None)
|
| 55 |
+
assert list(ad) == []
|
| 56 |
+
assert ad['FooPkg'] == []
|
| 57 |
+
ad.add(dist_from_fn("FooPkg-1.3_1.egg"))
|
| 58 |
+
ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg"))
|
| 59 |
+
ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg"))
|
| 60 |
+
|
| 61 |
+
# Name is in there now
|
| 62 |
+
assert ad['FooPkg']
|
| 63 |
+
# But only 1 package
|
| 64 |
+
assert list(ad) == ['foopkg']
|
| 65 |
+
|
| 66 |
+
# Distributions sort by version
|
| 67 |
+
expected = ['1.4', '1.3-1', '1.2']
|
| 68 |
+
assert [dist.version for dist in ad['FooPkg']] == expected
|
| 69 |
+
|
| 70 |
+
# Removing a distribution leaves sequence alone
|
| 71 |
+
ad.remove(ad['FooPkg'][1])
|
| 72 |
+
assert [dist.version for dist in ad['FooPkg']] == ['1.4', '1.2']
|
| 73 |
+
|
| 74 |
+
# And inserting adds them in order
|
| 75 |
+
ad.add(dist_from_fn("FooPkg-1.9.egg"))
|
| 76 |
+
assert [dist.version for dist in ad['FooPkg']] == ['1.9', '1.4', '1.2']
|
| 77 |
+
|
| 78 |
+
ws = WorkingSet([])
|
| 79 |
+
foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg")
|
| 80 |
+
foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg")
|
| 81 |
+
(req,) = parse_requirements("FooPkg>=1.3")
|
| 82 |
+
|
| 83 |
+
# Nominal case: no distros on path, should yield all applicable
|
| 84 |
+
assert ad.best_match(req, ws).version == '1.9'
|
| 85 |
+
# If a matching distro is already installed, should return only that
|
| 86 |
+
ws.add(foo14)
|
| 87 |
+
assert ad.best_match(req, ws).version == '1.4'
|
| 88 |
+
|
| 89 |
+
# If the first matching distro is unsuitable, it's a version conflict
|
| 90 |
+
ws = WorkingSet([])
|
| 91 |
+
ws.add(foo12)
|
| 92 |
+
ws.add(foo14)
|
| 93 |
+
with pytest.raises(VersionConflict):
|
| 94 |
+
ad.best_match(req, ws)
|
| 95 |
+
|
| 96 |
+
# If more than one match on the path, the first one takes precedence
|
| 97 |
+
ws = WorkingSet([])
|
| 98 |
+
ws.add(foo14)
|
| 99 |
+
ws.add(foo12)
|
| 100 |
+
ws.add(foo14)
|
| 101 |
+
assert ad.best_match(req, ws).version == '1.4'
|
| 102 |
+
|
| 103 |
+
def checkFooPkg(self, d):
|
| 104 |
+
assert d.project_name == "FooPkg"
|
| 105 |
+
assert d.key == "foopkg"
|
| 106 |
+
assert d.version == "1.3.post1"
|
| 107 |
+
assert d.py_version == "2.4"
|
| 108 |
+
assert d.platform == "win32"
|
| 109 |
+
assert d.parsed_version == parse_version("1.3-1")
|
| 110 |
+
|
| 111 |
+
def testDistroBasics(self):
|
| 112 |
+
d = Distribution(
|
| 113 |
+
"/some/path",
|
| 114 |
+
project_name="FooPkg",
|
| 115 |
+
version="1.3-1",
|
| 116 |
+
py_version="2.4",
|
| 117 |
+
platform="win32",
|
| 118 |
+
)
|
| 119 |
+
self.checkFooPkg(d)
|
| 120 |
+
|
| 121 |
+
d = Distribution("/some/path")
|
| 122 |
+
assert d.py_version == f'{sys.version_info.major}.{sys.version_info.minor}'
|
| 123 |
+
assert d.platform is None
|
| 124 |
+
|
| 125 |
+
def testDistroParse(self):
|
| 126 |
+
d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg")
|
| 127 |
+
self.checkFooPkg(d)
|
| 128 |
+
d = dist_from_fn("FooPkg-1.3.post1-py2.4-win32.egg-info")
|
| 129 |
+
self.checkFooPkg(d)
|
| 130 |
+
|
| 131 |
+
def testDistroMetadata(self):
|
| 132 |
+
d = Distribution(
|
| 133 |
+
"/some/path",
|
| 134 |
+
project_name="FooPkg",
|
| 135 |
+
py_version="2.4",
|
| 136 |
+
platform="win32",
|
| 137 |
+
metadata=Metadata(('PKG-INFO', "Metadata-Version: 1.0\nVersion: 1.3-1\n")),
|
| 138 |
+
)
|
| 139 |
+
self.checkFooPkg(d)
|
| 140 |
+
|
| 141 |
+
def distRequires(self, txt):
|
| 142 |
+
return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
|
| 143 |
+
|
| 144 |
+
def checkRequires(self, dist, txt, extras=()):
|
| 145 |
+
assert list(dist.requires(extras)) == list(parse_requirements(txt))
|
| 146 |
+
|
| 147 |
+
def testDistroDependsSimple(self):
|
| 148 |
+
for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
|
| 149 |
+
self.checkRequires(self.distRequires(v), v)
|
| 150 |
+
|
| 151 |
+
needs_object_dir = pytest.mark.skipif(
|
| 152 |
+
not hasattr(object, '__dir__'),
|
| 153 |
+
reason='object.__dir__ necessary for self.__dir__ implementation',
|
| 154 |
+
)
|
| 155 |
+
|
| 156 |
+
def test_distribution_dir(self):
|
| 157 |
+
d = pkg_resources.Distribution()
|
| 158 |
+
dir(d)
|
| 159 |
+
|
| 160 |
+
@needs_object_dir
|
| 161 |
+
def test_distribution_dir_includes_provider_dir(self):
|
| 162 |
+
d = pkg_resources.Distribution()
|
| 163 |
+
before = d.__dir__()
|
| 164 |
+
assert 'test_attr' not in before
|
| 165 |
+
d._provider.test_attr = None
|
| 166 |
+
after = d.__dir__()
|
| 167 |
+
assert len(after) == len(before) + 1
|
| 168 |
+
assert 'test_attr' in after
|
| 169 |
+
|
| 170 |
+
@needs_object_dir
|
| 171 |
+
def test_distribution_dir_ignores_provider_dir_leading_underscore(self):
|
| 172 |
+
d = pkg_resources.Distribution()
|
| 173 |
+
before = d.__dir__()
|
| 174 |
+
assert '_test_attr' not in before
|
| 175 |
+
d._provider._test_attr = None
|
| 176 |
+
after = d.__dir__()
|
| 177 |
+
assert len(after) == len(before)
|
| 178 |
+
assert '_test_attr' not in after
|
| 179 |
+
|
| 180 |
+
def testResolve(self):
|
| 181 |
+
ad = pkg_resources.Environment([])
|
| 182 |
+
ws = WorkingSet([])
|
| 183 |
+
# Resolving no requirements -> nothing to install
|
| 184 |
+
assert list(ws.resolve([], ad)) == []
|
| 185 |
+
# Request something not in the collection -> DistributionNotFound
|
| 186 |
+
with pytest.raises(pkg_resources.DistributionNotFound):
|
| 187 |
+
ws.resolve(parse_requirements("Foo"), ad)
|
| 188 |
+
|
| 189 |
+
Foo = Distribution.from_filename(
|
| 190 |
+
"/foo_dir/Foo-1.2.egg",
|
| 191 |
+
metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")),
|
| 192 |
+
)
|
| 193 |
+
ad.add(Foo)
|
| 194 |
+
ad.add(Distribution.from_filename("Foo-0.9.egg"))
|
| 195 |
+
|
| 196 |
+
# Request thing(s) that are available -> list to activate
|
| 197 |
+
for i in range(3):
|
| 198 |
+
targets = list(ws.resolve(parse_requirements("Foo"), ad))
|
| 199 |
+
assert targets == [Foo]
|
| 200 |
+
list(map(ws.add, targets))
|
| 201 |
+
with pytest.raises(VersionConflict):
|
| 202 |
+
ws.resolve(parse_requirements("Foo==0.9"), ad)
|
| 203 |
+
ws = WorkingSet([]) # reset
|
| 204 |
+
|
| 205 |
+
# Request an extra that causes an unresolved dependency for "Baz"
|
| 206 |
+
with pytest.raises(pkg_resources.DistributionNotFound):
|
| 207 |
+
ws.resolve(parse_requirements("Foo[bar]"), ad)
|
| 208 |
+
Baz = Distribution.from_filename(
|
| 209 |
+
"/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
|
| 210 |
+
)
|
| 211 |
+
ad.add(Baz)
|
| 212 |
+
|
| 213 |
+
# Activation list now includes resolved dependency
|
| 214 |
+
assert list(ws.resolve(parse_requirements("Foo[bar]"), ad)) == [Foo, Baz]
|
| 215 |
+
# Requests for conflicting versions produce VersionConflict
|
| 216 |
+
with pytest.raises(VersionConflict) as vc:
|
| 217 |
+
ws.resolve(parse_requirements("Foo==1.2\nFoo!=1.2"), ad)
|
| 218 |
+
|
| 219 |
+
msg = 'Foo 0.9 is installed but Foo==1.2 is required'
|
| 220 |
+
assert vc.value.report() == msg
|
| 221 |
+
|
| 222 |
+
def test_environment_marker_evaluation_negative(self):
|
| 223 |
+
"""Environment markers are evaluated at resolution time."""
|
| 224 |
+
ad = pkg_resources.Environment([])
|
| 225 |
+
ws = WorkingSet([])
|
| 226 |
+
res = ws.resolve(parse_requirements("Foo;python_version<'2'"), ad)
|
| 227 |
+
assert list(res) == []
|
| 228 |
+
|
| 229 |
+
def test_environment_marker_evaluation_positive(self):
|
| 230 |
+
ad = pkg_resources.Environment([])
|
| 231 |
+
ws = WorkingSet([])
|
| 232 |
+
Foo = Distribution.from_filename("/foo_dir/Foo-1.2.dist-info")
|
| 233 |
+
ad.add(Foo)
|
| 234 |
+
res = ws.resolve(parse_requirements("Foo;python_version>='2'"), ad)
|
| 235 |
+
assert list(res) == [Foo]
|
| 236 |
+
|
| 237 |
+
def test_environment_marker_evaluation_called(self):
|
| 238 |
+
"""
|
| 239 |
+
If one package foo requires bar without any extras,
|
| 240 |
+
markers should pass for bar without extras.
|
| 241 |
+
"""
|
| 242 |
+
(parent_req,) = parse_requirements("foo")
|
| 243 |
+
(req,) = parse_requirements("bar;python_version>='2'")
|
| 244 |
+
req_extras = pkg_resources._ReqExtras({req: parent_req.extras})
|
| 245 |
+
assert req_extras.markers_pass(req)
|
| 246 |
+
|
| 247 |
+
(parent_req,) = parse_requirements("foo[]")
|
| 248 |
+
(req,) = parse_requirements("bar;python_version>='2'")
|
| 249 |
+
req_extras = pkg_resources._ReqExtras({req: parent_req.extras})
|
| 250 |
+
assert req_extras.markers_pass(req)
|
| 251 |
+
|
| 252 |
+
def test_marker_evaluation_with_extras(self):
|
| 253 |
+
"""Extras are also evaluated as markers at resolution time."""
|
| 254 |
+
ad = pkg_resources.Environment([])
|
| 255 |
+
ws = WorkingSet([])
|
| 256 |
+
Foo = Distribution.from_filename(
|
| 257 |
+
"/foo_dir/Foo-1.2.dist-info",
|
| 258 |
+
metadata=Metadata((
|
| 259 |
+
"METADATA",
|
| 260 |
+
"Provides-Extra: baz\nRequires-Dist: quux; extra=='baz'",
|
| 261 |
+
)),
|
| 262 |
+
)
|
| 263 |
+
ad.add(Foo)
|
| 264 |
+
assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo]
|
| 265 |
+
quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
|
| 266 |
+
ad.add(quux)
|
| 267 |
+
res = list(ws.resolve(parse_requirements("Foo[baz]"), ad))
|
| 268 |
+
assert res == [Foo, quux]
|
| 269 |
+
|
| 270 |
+
def test_marker_evaluation_with_extras_normlized(self):
|
| 271 |
+
"""Extras are also evaluated as markers at resolution time."""
|
| 272 |
+
ad = pkg_resources.Environment([])
|
| 273 |
+
ws = WorkingSet([])
|
| 274 |
+
Foo = Distribution.from_filename(
|
| 275 |
+
"/foo_dir/Foo-1.2.dist-info",
|
| 276 |
+
metadata=Metadata((
|
| 277 |
+
"METADATA",
|
| 278 |
+
"Provides-Extra: baz-lightyear\n"
|
| 279 |
+
"Requires-Dist: quux; extra=='baz-lightyear'",
|
| 280 |
+
)),
|
| 281 |
+
)
|
| 282 |
+
ad.add(Foo)
|
| 283 |
+
assert list(ws.resolve(parse_requirements("Foo"), ad)) == [Foo]
|
| 284 |
+
quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
|
| 285 |
+
ad.add(quux)
|
| 286 |
+
res = list(ws.resolve(parse_requirements("Foo[baz-lightyear]"), ad))
|
| 287 |
+
assert res == [Foo, quux]
|
| 288 |
+
|
| 289 |
+
def test_marker_evaluation_with_multiple_extras(self):
|
| 290 |
+
ad = pkg_resources.Environment([])
|
| 291 |
+
ws = WorkingSet([])
|
| 292 |
+
Foo = Distribution.from_filename(
|
| 293 |
+
"/foo_dir/Foo-1.2.dist-info",
|
| 294 |
+
metadata=Metadata((
|
| 295 |
+
"METADATA",
|
| 296 |
+
"Provides-Extra: baz\n"
|
| 297 |
+
"Requires-Dist: quux; extra=='baz'\n"
|
| 298 |
+
"Provides-Extra: bar\n"
|
| 299 |
+
"Requires-Dist: fred; extra=='bar'\n",
|
| 300 |
+
)),
|
| 301 |
+
)
|
| 302 |
+
ad.add(Foo)
|
| 303 |
+
quux = Distribution.from_filename("/foo_dir/quux-1.0.dist-info")
|
| 304 |
+
ad.add(quux)
|
| 305 |
+
fred = Distribution.from_filename("/foo_dir/fred-0.1.dist-info")
|
| 306 |
+
ad.add(fred)
|
| 307 |
+
res = list(ws.resolve(parse_requirements("Foo[baz,bar]"), ad))
|
| 308 |
+
assert sorted(res) == [fred, quux, Foo]
|
| 309 |
+
|
| 310 |
+
def test_marker_evaluation_with_extras_loop(self):
|
| 311 |
+
ad = pkg_resources.Environment([])
|
| 312 |
+
ws = WorkingSet([])
|
| 313 |
+
a = Distribution.from_filename(
|
| 314 |
+
"/foo_dir/a-0.2.dist-info",
|
| 315 |
+
metadata=Metadata(("METADATA", "Requires-Dist: c[a]")),
|
| 316 |
+
)
|
| 317 |
+
b = Distribution.from_filename(
|
| 318 |
+
"/foo_dir/b-0.3.dist-info",
|
| 319 |
+
metadata=Metadata(("METADATA", "Requires-Dist: c[b]")),
|
| 320 |
+
)
|
| 321 |
+
c = Distribution.from_filename(
|
| 322 |
+
"/foo_dir/c-1.0.dist-info",
|
| 323 |
+
metadata=Metadata((
|
| 324 |
+
"METADATA",
|
| 325 |
+
"Provides-Extra: a\n"
|
| 326 |
+
"Requires-Dist: b;extra=='a'\n"
|
| 327 |
+
"Provides-Extra: b\n"
|
| 328 |
+
"Requires-Dist: foo;extra=='b'",
|
| 329 |
+
)),
|
| 330 |
+
)
|
| 331 |
+
foo = Distribution.from_filename("/foo_dir/foo-0.1.dist-info")
|
| 332 |
+
for dist in (a, b, c, foo):
|
| 333 |
+
ad.add(dist)
|
| 334 |
+
res = list(ws.resolve(parse_requirements("a"), ad))
|
| 335 |
+
assert res == [a, c, b, foo]
|
| 336 |
+
|
| 337 |
+
@pytest.mark.xfail(
|
| 338 |
+
sys.version_info[:2] == (3, 12) and sys.version_info.releaselevel != 'final',
|
| 339 |
+
reason="https://github.com/python/cpython/issues/103632",
|
| 340 |
+
)
|
| 341 |
+
def testDistroDependsOptions(self):
|
| 342 |
+
d = self.distRequires(
|
| 343 |
+
"""
|
| 344 |
+
Twisted>=1.5
|
| 345 |
+
[docgen]
|
| 346 |
+
ZConfig>=2.0
|
| 347 |
+
docutils>=0.3
|
| 348 |
+
[fastcgi]
|
| 349 |
+
fcgiapp>=0.1"""
|
| 350 |
+
)
|
| 351 |
+
self.checkRequires(d, "Twisted>=1.5")
|
| 352 |
+
self.checkRequires(
|
| 353 |
+
d, "Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
|
| 354 |
+
)
|
| 355 |
+
self.checkRequires(d, "Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"])
|
| 356 |
+
self.checkRequires(
|
| 357 |
+
d,
|
| 358 |
+
"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
|
| 359 |
+
["docgen", "fastcgi"],
|
| 360 |
+
)
|
| 361 |
+
self.checkRequires(
|
| 362 |
+
d,
|
| 363 |
+
"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
|
| 364 |
+
["fastcgi", "docgen"],
|
| 365 |
+
)
|
| 366 |
+
with pytest.raises(pkg_resources.UnknownExtra):
|
| 367 |
+
d.requires(["foo"])
|
| 368 |
+
|
| 369 |
+
|
| 370 |
+
class TestWorkingSet:
|
| 371 |
+
def test_find_conflicting(self):
|
| 372 |
+
ws = WorkingSet([])
|
| 373 |
+
Foo = Distribution.from_filename("/foo_dir/Foo-1.2.egg")
|
| 374 |
+
ws.add(Foo)
|
| 375 |
+
|
| 376 |
+
# create a requirement that conflicts with Foo 1.2
|
| 377 |
+
req = next(parse_requirements("Foo<1.2"))
|
| 378 |
+
|
| 379 |
+
with pytest.raises(VersionConflict) as vc:
|
| 380 |
+
ws.find(req)
|
| 381 |
+
|
| 382 |
+
msg = 'Foo 1.2 is installed but Foo<1.2 is required'
|
| 383 |
+
assert vc.value.report() == msg
|
| 384 |
+
|
| 385 |
+
def test_resolve_conflicts_with_prior(self):
|
| 386 |
+
"""
|
| 387 |
+
A ContextualVersionConflict should be raised when a requirement
|
| 388 |
+
conflicts with a prior requirement for a different package.
|
| 389 |
+
"""
|
| 390 |
+
# Create installation where Foo depends on Baz 1.0 and Bar depends on
|
| 391 |
+
# Baz 2.0.
|
| 392 |
+
ws = WorkingSet([])
|
| 393 |
+
md = Metadata(('depends.txt', "Baz==1.0"))
|
| 394 |
+
Foo = Distribution.from_filename("/foo_dir/Foo-1.0.egg", metadata=md)
|
| 395 |
+
ws.add(Foo)
|
| 396 |
+
md = Metadata(('depends.txt', "Baz==2.0"))
|
| 397 |
+
Bar = Distribution.from_filename("/foo_dir/Bar-1.0.egg", metadata=md)
|
| 398 |
+
ws.add(Bar)
|
| 399 |
+
Baz = Distribution.from_filename("/foo_dir/Baz-1.0.egg")
|
| 400 |
+
ws.add(Baz)
|
| 401 |
+
Baz = Distribution.from_filename("/foo_dir/Baz-2.0.egg")
|
| 402 |
+
ws.add(Baz)
|
| 403 |
+
|
| 404 |
+
with pytest.raises(VersionConflict) as vc:
|
| 405 |
+
ws.resolve(parse_requirements("Foo\nBar\n"))
|
| 406 |
+
|
| 407 |
+
msg = "Baz 1.0 is installed but Baz==2.0 is required by "
|
| 408 |
+
msg += repr(set(['Bar']))
|
| 409 |
+
assert vc.value.report() == msg
|
| 410 |
+
|
| 411 |
+
|
| 412 |
+
class TestEntryPoints:
|
| 413 |
+
def assertfields(self, ep):
|
| 414 |
+
assert ep.name == "foo"
|
| 415 |
+
assert ep.module_name == "pkg_resources.tests.test_resources"
|
| 416 |
+
assert ep.attrs == ("TestEntryPoints",)
|
| 417 |
+
assert ep.extras == ("x",)
|
| 418 |
+
assert ep.load() is TestEntryPoints
|
| 419 |
+
expect = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]"
|
| 420 |
+
assert str(ep) == expect
|
| 421 |
+
|
| 422 |
+
def setup_method(self, method):
|
| 423 |
+
self.dist = Distribution.from_filename(
|
| 424 |
+
"FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt', '[x]'))
|
| 425 |
+
)
|
| 426 |
+
|
| 427 |
+
def testBasics(self):
|
| 428 |
+
ep = EntryPoint(
|
| 429 |
+
"foo",
|
| 430 |
+
"pkg_resources.tests.test_resources",
|
| 431 |
+
["TestEntryPoints"],
|
| 432 |
+
["x"],
|
| 433 |
+
self.dist,
|
| 434 |
+
)
|
| 435 |
+
self.assertfields(ep)
|
| 436 |
+
|
| 437 |
+
def testParse(self):
|
| 438 |
+
s = "foo = pkg_resources.tests.test_resources:TestEntryPoints [x]"
|
| 439 |
+
ep = EntryPoint.parse(s, self.dist)
|
| 440 |
+
self.assertfields(ep)
|
| 441 |
+
|
| 442 |
+
ep = EntryPoint.parse("bar baz= spammity[PING]")
|
| 443 |
+
assert ep.name == "bar baz"
|
| 444 |
+
assert ep.module_name == "spammity"
|
| 445 |
+
assert ep.attrs == ()
|
| 446 |
+
assert ep.extras == ("ping",)
|
| 447 |
+
|
| 448 |
+
ep = EntryPoint.parse(" fizzly = wocka:foo")
|
| 449 |
+
assert ep.name == "fizzly"
|
| 450 |
+
assert ep.module_name == "wocka"
|
| 451 |
+
assert ep.attrs == ("foo",)
|
| 452 |
+
assert ep.extras == ()
|
| 453 |
+
|
| 454 |
+
# plus in the name
|
| 455 |
+
spec = "html+mako = mako.ext.pygmentplugin:MakoHtmlLexer"
|
| 456 |
+
ep = EntryPoint.parse(spec)
|
| 457 |
+
assert ep.name == 'html+mako'
|
| 458 |
+
|
| 459 |
+
reject_specs = "foo", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2"
|
| 460 |
+
|
| 461 |
+
@pytest.mark.parametrize("reject_spec", reject_specs)
|
| 462 |
+
def test_reject_spec(self, reject_spec):
|
| 463 |
+
with pytest.raises(ValueError):
|
| 464 |
+
EntryPoint.parse(reject_spec)
|
| 465 |
+
|
| 466 |
+
def test_printable_name(self):
|
| 467 |
+
"""
|
| 468 |
+
Allow any printable character in the name.
|
| 469 |
+
"""
|
| 470 |
+
# Create a name with all printable characters; strip the whitespace.
|
| 471 |
+
name = string.printable.strip()
|
| 472 |
+
spec = "{name} = module:attr".format(**locals())
|
| 473 |
+
ep = EntryPoint.parse(spec)
|
| 474 |
+
assert ep.name == name
|
| 475 |
+
|
| 476 |
+
def checkSubMap(self, m):
|
| 477 |
+
assert len(m) == len(self.submap_expect)
|
| 478 |
+
for key, ep in self.submap_expect.items():
|
| 479 |
+
assert m.get(key).name == ep.name
|
| 480 |
+
assert m.get(key).module_name == ep.module_name
|
| 481 |
+
assert sorted(m.get(key).attrs) == sorted(ep.attrs)
|
| 482 |
+
assert sorted(m.get(key).extras) == sorted(ep.extras)
|
| 483 |
+
|
| 484 |
+
submap_expect = dict(
|
| 485 |
+
feature1=EntryPoint('feature1', 'somemodule', ['somefunction']),
|
| 486 |
+
feature2=EntryPoint(
|
| 487 |
+
'feature2', 'another.module', ['SomeClass'], ['extra1', 'extra2']
|
| 488 |
+
),
|
| 489 |
+
feature3=EntryPoint('feature3', 'this.module', extras=['something']),
|
| 490 |
+
)
|
| 491 |
+
submap_str = """
|
| 492 |
+
# define features for blah blah
|
| 493 |
+
feature1 = somemodule:somefunction
|
| 494 |
+
feature2 = another.module:SomeClass [extra1,extra2]
|
| 495 |
+
feature3 = this.module [something]
|
| 496 |
+
"""
|
| 497 |
+
|
| 498 |
+
def testParseList(self):
|
| 499 |
+
self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
|
| 500 |
+
with pytest.raises(ValueError):
|
| 501 |
+
EntryPoint.parse_group("x a", "foo=bar")
|
| 502 |
+
with pytest.raises(ValueError):
|
| 503 |
+
EntryPoint.parse_group("x", ["foo=baz", "foo=bar"])
|
| 504 |
+
|
| 505 |
+
def testParseMap(self):
|
| 506 |
+
m = EntryPoint.parse_map({'xyz': self.submap_str})
|
| 507 |
+
self.checkSubMap(m['xyz'])
|
| 508 |
+
assert list(m.keys()) == ['xyz']
|
| 509 |
+
m = EntryPoint.parse_map("[xyz]\n" + self.submap_str)
|
| 510 |
+
self.checkSubMap(m['xyz'])
|
| 511 |
+
assert list(m.keys()) == ['xyz']
|
| 512 |
+
with pytest.raises(ValueError):
|
| 513 |
+
EntryPoint.parse_map(["[xyz]", "[xyz]"])
|
| 514 |
+
with pytest.raises(ValueError):
|
| 515 |
+
EntryPoint.parse_map(self.submap_str)
|
| 516 |
+
|
| 517 |
+
def testDeprecationWarnings(self):
|
| 518 |
+
ep = EntryPoint(
|
| 519 |
+
"foo", "pkg_resources.tests.test_resources", ["TestEntryPoints"], ["x"]
|
| 520 |
+
)
|
| 521 |
+
with pytest.warns(pkg_resources.PkgResourcesDeprecationWarning):
|
| 522 |
+
ep.load(require=False)
|
| 523 |
+
|
| 524 |
+
|
| 525 |
+
class TestRequirements:
|
| 526 |
+
def testBasics(self):
|
| 527 |
+
r = Requirement.parse("Twisted>=1.2")
|
| 528 |
+
assert str(r) == "Twisted>=1.2"
|
| 529 |
+
assert repr(r) == "Requirement.parse('Twisted>=1.2')"
|
| 530 |
+
assert r == Requirement("Twisted>=1.2")
|
| 531 |
+
assert r == Requirement("twisTed>=1.2")
|
| 532 |
+
assert r != Requirement("Twisted>=2.0")
|
| 533 |
+
assert r != Requirement("Zope>=1.2")
|
| 534 |
+
assert r != Requirement("Zope>=3.0")
|
| 535 |
+
assert r != Requirement("Twisted[extras]>=1.2")
|
| 536 |
+
|
| 537 |
+
def testOrdering(self):
|
| 538 |
+
r1 = Requirement("Twisted==1.2c1,>=1.2")
|
| 539 |
+
r2 = Requirement("Twisted>=1.2,==1.2c1")
|
| 540 |
+
assert r1 == r2
|
| 541 |
+
assert str(r1) == str(r2)
|
| 542 |
+
assert str(r2) == "Twisted==1.2c1,>=1.2"
|
| 543 |
+
assert Requirement("Twisted") != Requirement(
|
| 544 |
+
"Twisted @ https://localhost/twisted.zip"
|
| 545 |
+
)
|
| 546 |
+
|
| 547 |
+
def testBasicContains(self):
|
| 548 |
+
r = Requirement("Twisted>=1.2")
|
| 549 |
+
foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
|
| 550 |
+
twist11 = Distribution.from_filename("Twisted-1.1.egg")
|
| 551 |
+
twist12 = Distribution.from_filename("Twisted-1.2.egg")
|
| 552 |
+
assert parse_version('1.2') in r
|
| 553 |
+
assert parse_version('1.1') not in r
|
| 554 |
+
assert '1.2' in r
|
| 555 |
+
assert '1.1' not in r
|
| 556 |
+
assert foo_dist not in r
|
| 557 |
+
assert twist11 not in r
|
| 558 |
+
assert twist12 in r
|
| 559 |
+
|
| 560 |
+
def testOptionsAndHashing(self):
|
| 561 |
+
r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
|
| 562 |
+
r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
|
| 563 |
+
assert r1 == r2
|
| 564 |
+
assert set(r1.extras) == set(("foo", "bar"))
|
| 565 |
+
assert set(r2.extras) == set(("foo", "bar"))
|
| 566 |
+
assert hash(r1) == hash(r2)
|
| 567 |
+
assert hash(r1) == hash((
|
| 568 |
+
"twisted",
|
| 569 |
+
None,
|
| 570 |
+
SpecifierSet(">=1.2"),
|
| 571 |
+
frozenset(["foo", "bar"]),
|
| 572 |
+
None,
|
| 573 |
+
))
|
| 574 |
+
assert hash(
|
| 575 |
+
Requirement.parse("Twisted @ https://localhost/twisted.zip")
|
| 576 |
+
) == hash((
|
| 577 |
+
"twisted",
|
| 578 |
+
"https://localhost/twisted.zip",
|
| 579 |
+
SpecifierSet(),
|
| 580 |
+
frozenset(),
|
| 581 |
+
None,
|
| 582 |
+
))
|
| 583 |
+
|
| 584 |
+
def testVersionEquality(self):
|
| 585 |
+
r1 = Requirement.parse("foo==0.3a2")
|
| 586 |
+
r2 = Requirement.parse("foo!=0.3a4")
|
| 587 |
+
d = Distribution.from_filename
|
| 588 |
+
|
| 589 |
+
assert d("foo-0.3a4.egg") not in r1
|
| 590 |
+
assert d("foo-0.3a1.egg") not in r1
|
| 591 |
+
assert d("foo-0.3a4.egg") not in r2
|
| 592 |
+
|
| 593 |
+
assert d("foo-0.3a2.egg") in r1
|
| 594 |
+
assert d("foo-0.3a2.egg") in r2
|
| 595 |
+
assert d("foo-0.3a3.egg") in r2
|
| 596 |
+
assert d("foo-0.3a5.egg") in r2
|
| 597 |
+
|
| 598 |
+
def testSetuptoolsProjectName(self):
|
| 599 |
+
"""
|
| 600 |
+
The setuptools project should implement the setuptools package.
|
| 601 |
+
"""
|
| 602 |
+
|
| 603 |
+
assert Requirement.parse('setuptools').project_name == 'setuptools'
|
| 604 |
+
# setuptools 0.7 and higher means setuptools.
|
| 605 |
+
assert Requirement.parse('setuptools == 0.7').project_name == 'setuptools'
|
| 606 |
+
assert Requirement.parse('setuptools == 0.7a1').project_name == 'setuptools'
|
| 607 |
+
assert Requirement.parse('setuptools >= 0.7').project_name == 'setuptools'
|
| 608 |
+
|
| 609 |
+
|
| 610 |
+
class TestParsing:
|
| 611 |
+
def testEmptyParse(self):
|
| 612 |
+
assert list(parse_requirements('')) == []
|
| 613 |
+
|
| 614 |
+
def testYielding(self):
|
| 615 |
+
for inp, out in [
|
| 616 |
+
([], []),
|
| 617 |
+
('x', ['x']),
|
| 618 |
+
([[]], []),
|
| 619 |
+
(' x\n y', ['x', 'y']),
|
| 620 |
+
(['x\n\n', 'y'], ['x', 'y']),
|
| 621 |
+
]:
|
| 622 |
+
assert list(pkg_resources.yield_lines(inp)) == out
|
| 623 |
+
|
| 624 |
+
def testSplitting(self):
|
| 625 |
+
sample = """
|
| 626 |
+
x
|
| 627 |
+
[Y]
|
| 628 |
+
z
|
| 629 |
+
|
| 630 |
+
a
|
| 631 |
+
[b ]
|
| 632 |
+
# foo
|
| 633 |
+
c
|
| 634 |
+
[ d]
|
| 635 |
+
[q]
|
| 636 |
+
v
|
| 637 |
+
"""
|
| 638 |
+
assert list(pkg_resources.split_sections(sample)) == [
|
| 639 |
+
(None, ["x"]),
|
| 640 |
+
("Y", ["z", "a"]),
|
| 641 |
+
("b", ["c"]),
|
| 642 |
+
("d", []),
|
| 643 |
+
("q", ["v"]),
|
| 644 |
+
]
|
| 645 |
+
with pytest.raises(ValueError):
|
| 646 |
+
list(pkg_resources.split_sections("[foo"))
|
| 647 |
+
|
| 648 |
+
def testSafeName(self):
|
| 649 |
+
assert safe_name("adns-python") == "adns-python"
|
| 650 |
+
assert safe_name("WSGI Utils") == "WSGI-Utils"
|
| 651 |
+
assert safe_name("WSGI Utils") == "WSGI-Utils"
|
| 652 |
+
assert safe_name("Money$$$Maker") == "Money-Maker"
|
| 653 |
+
assert safe_name("peak.web") != "peak-web"
|
| 654 |
+
|
| 655 |
+
def testSafeVersion(self):
|
| 656 |
+
assert safe_version("1.2-1") == "1.2.post1"
|
| 657 |
+
assert safe_version("1.2 alpha") == "1.2.alpha"
|
| 658 |
+
assert safe_version("2.3.4 20050521") == "2.3.4.20050521"
|
| 659 |
+
assert safe_version("Money$$$Maker") == "Money-Maker"
|
| 660 |
+
assert safe_version("peak.web") == "peak.web"
|
| 661 |
+
|
| 662 |
+
def testSimpleRequirements(self):
|
| 663 |
+
assert list(parse_requirements('Twis-Ted>=1.2-1')) == [
|
| 664 |
+
Requirement('Twis-Ted>=1.2-1')
|
| 665 |
+
]
|
| 666 |
+
assert list(parse_requirements('Twisted >=1.2, \\ # more\n<2.0')) == [
|
| 667 |
+
Requirement('Twisted>=1.2,<2.0')
|
| 668 |
+
]
|
| 669 |
+
assert Requirement.parse("FooBar==1.99a3") == Requirement("FooBar==1.99a3")
|
| 670 |
+
with pytest.raises(ValueError):
|
| 671 |
+
Requirement.parse(">=2.3")
|
| 672 |
+
with pytest.raises(ValueError):
|
| 673 |
+
Requirement.parse("x\\")
|
| 674 |
+
with pytest.raises(ValueError):
|
| 675 |
+
Requirement.parse("x==2 q")
|
| 676 |
+
with pytest.raises(ValueError):
|
| 677 |
+
Requirement.parse("X==1\nY==2")
|
| 678 |
+
with pytest.raises(ValueError):
|
| 679 |
+
Requirement.parse("#")
|
| 680 |
+
|
| 681 |
+
def test_requirements_with_markers(self):
|
| 682 |
+
assert Requirement.parse("foobar;os_name=='a'") == Requirement.parse(
|
| 683 |
+
"foobar;os_name=='a'"
|
| 684 |
+
)
|
| 685 |
+
assert Requirement.parse(
|
| 686 |
+
"name==1.1;python_version=='2.7'"
|
| 687 |
+
) != Requirement.parse("name==1.1;python_version=='3.6'")
|
| 688 |
+
assert Requirement.parse(
|
| 689 |
+
"name==1.0;python_version=='2.7'"
|
| 690 |
+
) != Requirement.parse("name==1.2;python_version=='2.7'")
|
| 691 |
+
assert Requirement.parse(
|
| 692 |
+
"name[foo]==1.0;python_version=='3.6'"
|
| 693 |
+
) != Requirement.parse("name[foo,bar]==1.0;python_version=='3.6'")
|
| 694 |
+
|
| 695 |
+
def test_local_version(self):
|
| 696 |
+
parse_requirements('foo==1.0+org1')
|
| 697 |
+
|
| 698 |
+
def test_spaces_between_multiple_versions(self):
|
| 699 |
+
parse_requirements('foo>=1.0, <3')
|
| 700 |
+
parse_requirements('foo >= 1.0, < 3')
|
| 701 |
+
|
| 702 |
+
@pytest.mark.parametrize(
|
| 703 |
+
("lower", "upper"),
|
| 704 |
+
[
|
| 705 |
+
('1.2-rc1', '1.2rc1'),
|
| 706 |
+
('0.4', '0.4.0'),
|
| 707 |
+
('0.4.0.0', '0.4.0'),
|
| 708 |
+
('0.4.0-0', '0.4-0'),
|
| 709 |
+
('0post1', '0.0post1'),
|
| 710 |
+
('0pre1', '0.0c1'),
|
| 711 |
+
('0.0.0preview1', '0c1'),
|
| 712 |
+
('0.0c1', '0-rc1'),
|
| 713 |
+
('1.2a1', '1.2.a.1'),
|
| 714 |
+
('1.2.a', '1.2a'),
|
| 715 |
+
],
|
| 716 |
+
)
|
| 717 |
+
def testVersionEquality(self, lower, upper):
|
| 718 |
+
assert parse_version(lower) == parse_version(upper)
|
| 719 |
+
|
| 720 |
+
torture = """
|
| 721 |
+
0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
|
| 722 |
+
0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
|
| 723 |
+
0.77.2-1 0.77.1-1 0.77.0-1
|
| 724 |
+
"""
|
| 725 |
+
|
| 726 |
+
@pytest.mark.parametrize(
|
| 727 |
+
("lower", "upper"),
|
| 728 |
+
[
|
| 729 |
+
('2.1', '2.1.1'),
|
| 730 |
+
('2a1', '2b0'),
|
| 731 |
+
('2a1', '2.1'),
|
| 732 |
+
('2.3a1', '2.3'),
|
| 733 |
+
('2.1-1', '2.1-2'),
|
| 734 |
+
('2.1-1', '2.1.1'),
|
| 735 |
+
('2.1', '2.1post4'),
|
| 736 |
+
('2.1a0-20040501', '2.1'),
|
| 737 |
+
('1.1', '02.1'),
|
| 738 |
+
('3.2', '3.2.post0'),
|
| 739 |
+
('3.2post1', '3.2post2'),
|
| 740 |
+
('0.4', '4.0'),
|
| 741 |
+
('0.0.4', '0.4.0'),
|
| 742 |
+
('0post1', '0.4post1'),
|
| 743 |
+
('2.1.0-rc1', '2.1.0'),
|
| 744 |
+
('2.1dev', '2.1a0'),
|
| 745 |
+
]
|
| 746 |
+
+ list(pairwise(reversed(torture.split()))),
|
| 747 |
+
)
|
| 748 |
+
def testVersionOrdering(self, lower, upper):
|
| 749 |
+
assert parse_version(lower) < parse_version(upper)
|
| 750 |
+
|
| 751 |
+
def testVersionHashable(self):
|
| 752 |
+
"""
|
| 753 |
+
Ensure that our versions stay hashable even though we've subclassed
|
| 754 |
+
them and added some shim code to them.
|
| 755 |
+
"""
|
| 756 |
+
assert hash(parse_version("1.0")) == hash(parse_version("1.0"))
|
| 757 |
+
|
| 758 |
+
|
| 759 |
+
class TestNamespaces:
|
| 760 |
+
ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n"
|
| 761 |
+
|
| 762 |
+
@pytest.fixture
|
| 763 |
+
def symlinked_tmpdir(self, tmpdir):
|
| 764 |
+
"""
|
| 765 |
+
Where available, return the tempdir as a symlink,
|
| 766 |
+
which as revealed in #231 is more fragile than
|
| 767 |
+
a natural tempdir.
|
| 768 |
+
"""
|
| 769 |
+
if not hasattr(os, 'symlink'):
|
| 770 |
+
yield str(tmpdir)
|
| 771 |
+
return
|
| 772 |
+
|
| 773 |
+
link_name = str(tmpdir) + '-linked'
|
| 774 |
+
os.symlink(str(tmpdir), link_name)
|
| 775 |
+
try:
|
| 776 |
+
yield type(tmpdir)(link_name)
|
| 777 |
+
finally:
|
| 778 |
+
os.unlink(link_name)
|
| 779 |
+
|
| 780 |
+
@pytest.fixture(autouse=True)
|
| 781 |
+
def patched_path(self, tmpdir):
|
| 782 |
+
"""
|
| 783 |
+
Patch sys.path to include the 'site-pkgs' dir. Also
|
| 784 |
+
restore pkg_resources._namespace_packages to its
|
| 785 |
+
former state.
|
| 786 |
+
"""
|
| 787 |
+
saved_ns_pkgs = pkg_resources._namespace_packages.copy()
|
| 788 |
+
saved_sys_path = sys.path[:]
|
| 789 |
+
site_pkgs = tmpdir.mkdir('site-pkgs')
|
| 790 |
+
sys.path.append(str(site_pkgs))
|
| 791 |
+
try:
|
| 792 |
+
yield
|
| 793 |
+
finally:
|
| 794 |
+
pkg_resources._namespace_packages = saved_ns_pkgs
|
| 795 |
+
sys.path = saved_sys_path
|
| 796 |
+
|
| 797 |
+
issue591 = pytest.mark.xfail(platform.system() == 'Windows', reason="#591")
|
| 798 |
+
|
| 799 |
+
@issue591
|
| 800 |
+
def test_two_levels_deep(self, symlinked_tmpdir):
|
| 801 |
+
"""
|
| 802 |
+
Test nested namespace packages
|
| 803 |
+
Create namespace packages in the following tree :
|
| 804 |
+
site-packages-1/pkg1/pkg2
|
| 805 |
+
site-packages-2/pkg1/pkg2
|
| 806 |
+
Check both are in the _namespace_packages dict and that their __path__
|
| 807 |
+
is correct
|
| 808 |
+
"""
|
| 809 |
+
real_tmpdir = symlinked_tmpdir.realpath()
|
| 810 |
+
tmpdir = symlinked_tmpdir
|
| 811 |
+
sys.path.append(str(tmpdir / 'site-pkgs2'))
|
| 812 |
+
site_dirs = tmpdir / 'site-pkgs', tmpdir / 'site-pkgs2'
|
| 813 |
+
for site in site_dirs:
|
| 814 |
+
pkg1 = site / 'pkg1'
|
| 815 |
+
pkg2 = pkg1 / 'pkg2'
|
| 816 |
+
pkg2.ensure_dir()
|
| 817 |
+
(pkg1 / '__init__.py').write_text(self.ns_str, encoding='utf-8')
|
| 818 |
+
(pkg2 / '__init__.py').write_text(self.ns_str, encoding='utf-8')
|
| 819 |
+
with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"):
|
| 820 |
+
import pkg1 # pyright: ignore[reportMissingImports] # Temporary package for test
|
| 821 |
+
assert "pkg1" in pkg_resources._namespace_packages
|
| 822 |
+
# attempt to import pkg2 from site-pkgs2
|
| 823 |
+
with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"):
|
| 824 |
+
import pkg1.pkg2 # pyright: ignore[reportMissingImports] # Temporary package for test
|
| 825 |
+
# check the _namespace_packages dict
|
| 826 |
+
assert "pkg1.pkg2" in pkg_resources._namespace_packages
|
| 827 |
+
assert pkg_resources._namespace_packages["pkg1"] == ["pkg1.pkg2"]
|
| 828 |
+
# check the __path__ attribute contains both paths
|
| 829 |
+
expected = [
|
| 830 |
+
str(real_tmpdir / "site-pkgs" / "pkg1" / "pkg2"),
|
| 831 |
+
str(real_tmpdir / "site-pkgs2" / "pkg1" / "pkg2"),
|
| 832 |
+
]
|
| 833 |
+
assert pkg1.pkg2.__path__ == expected
|
| 834 |
+
|
| 835 |
+
@issue591
|
| 836 |
+
def test_path_order(self, symlinked_tmpdir):
|
| 837 |
+
"""
|
| 838 |
+
Test that if multiple versions of the same namespace package subpackage
|
| 839 |
+
are on different sys.path entries, that only the one earliest on
|
| 840 |
+
sys.path is imported, and that the namespace package's __path__ is in
|
| 841 |
+
the correct order.
|
| 842 |
+
|
| 843 |
+
Regression test for https://github.com/pypa/setuptools/issues/207
|
| 844 |
+
"""
|
| 845 |
+
|
| 846 |
+
tmpdir = symlinked_tmpdir
|
| 847 |
+
site_dirs = (
|
| 848 |
+
tmpdir / "site-pkgs",
|
| 849 |
+
tmpdir / "site-pkgs2",
|
| 850 |
+
tmpdir / "site-pkgs3",
|
| 851 |
+
)
|
| 852 |
+
|
| 853 |
+
vers_str = "__version__ = %r"
|
| 854 |
+
|
| 855 |
+
for number, site in enumerate(site_dirs, 1):
|
| 856 |
+
if number > 1:
|
| 857 |
+
sys.path.append(str(site))
|
| 858 |
+
nspkg = site / 'nspkg'
|
| 859 |
+
subpkg = nspkg / 'subpkg'
|
| 860 |
+
subpkg.ensure_dir()
|
| 861 |
+
(nspkg / '__init__.py').write_text(self.ns_str, encoding='utf-8')
|
| 862 |
+
(subpkg / '__init__.py').write_text(vers_str % number, encoding='utf-8')
|
| 863 |
+
|
| 864 |
+
with pytest.warns(DeprecationWarning, match="pkg_resources.declare_namespace"):
|
| 865 |
+
import nspkg # pyright: ignore[reportMissingImports] # Temporary package for test
|
| 866 |
+
import nspkg.subpkg # pyright: ignore[reportMissingImports] # Temporary package for test
|
| 867 |
+
expected = [str(site.realpath() / 'nspkg') for site in site_dirs]
|
| 868 |
+
assert nspkg.__path__ == expected
|
| 869 |
+
assert nspkg.subpkg.__version__ == 1
|
llava_video/lib/python3.10/site-packages/pybind11/__init__.py
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
|
| 5 |
+
if sys.version_info < (3, 7): # noqa: UP036
|
| 6 |
+
msg = "pybind11 does not support Python < 3.7. v2.12 was the last release supporting Python 3.6."
|
| 7 |
+
raise ImportError(msg)
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
from ._version import __version__, version_info
|
| 11 |
+
from .commands import get_cmake_dir, get_include, get_pkgconfig_dir
|
| 12 |
+
|
| 13 |
+
__all__ = (
|
| 14 |
+
"version_info",
|
| 15 |
+
"__version__",
|
| 16 |
+
"get_include",
|
| 17 |
+
"get_cmake_dir",
|
| 18 |
+
"get_pkgconfig_dir",
|
| 19 |
+
)
|
llava_video/lib/python3.10/site-packages/pybind11/__main__.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# pylint: disable=missing-function-docstring
|
| 2 |
+
from __future__ import annotations
|
| 3 |
+
|
| 4 |
+
import argparse
|
| 5 |
+
import re
|
| 6 |
+
import sys
|
| 7 |
+
import sysconfig
|
| 8 |
+
|
| 9 |
+
from ._version import __version__
|
| 10 |
+
from .commands import get_cmake_dir, get_include, get_pkgconfig_dir
|
| 11 |
+
|
| 12 |
+
# This is the conditional used for os.path being posixpath
|
| 13 |
+
if "posix" in sys.builtin_module_names:
|
| 14 |
+
from shlex import quote
|
| 15 |
+
elif "nt" in sys.builtin_module_names:
|
| 16 |
+
# See https://github.com/mesonbuild/meson/blob/db22551ed9d2dd7889abea01cc1c7bba02bf1c75/mesonbuild/utils/universal.py#L1092-L1121
|
| 17 |
+
# and the original documents:
|
| 18 |
+
# https://docs.microsoft.com/en-us/cpp/c-language/parsing-c-command-line-arguments and
|
| 19 |
+
# https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
|
| 20 |
+
UNSAFE = re.compile("[ \t\n\r]")
|
| 21 |
+
|
| 22 |
+
def quote(s: str) -> str:
|
| 23 |
+
if s and not UNSAFE.search(s):
|
| 24 |
+
return s
|
| 25 |
+
|
| 26 |
+
# Paths cannot contain a '"' on Windows, so we don't need to worry
|
| 27 |
+
# about nuanced counting here.
|
| 28 |
+
return f'"{s}\\"' if s.endswith("\\") else f'"{s}"'
|
| 29 |
+
else:
|
| 30 |
+
|
| 31 |
+
def quote(s: str) -> str:
|
| 32 |
+
return s
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def print_includes() -> None:
|
| 36 |
+
dirs = [
|
| 37 |
+
sysconfig.get_path("include"),
|
| 38 |
+
sysconfig.get_path("platinclude"),
|
| 39 |
+
get_include(),
|
| 40 |
+
]
|
| 41 |
+
|
| 42 |
+
# Make unique but preserve order
|
| 43 |
+
unique_dirs = []
|
| 44 |
+
for d in dirs:
|
| 45 |
+
if d and d not in unique_dirs:
|
| 46 |
+
unique_dirs.append(d)
|
| 47 |
+
|
| 48 |
+
print(" ".join(quote(f"-I{d}") for d in unique_dirs))
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def main() -> None:
|
| 52 |
+
parser = argparse.ArgumentParser()
|
| 53 |
+
parser.add_argument(
|
| 54 |
+
"--version",
|
| 55 |
+
action="version",
|
| 56 |
+
version=__version__,
|
| 57 |
+
help="Print the version and exit.",
|
| 58 |
+
)
|
| 59 |
+
parser.add_argument(
|
| 60 |
+
"--includes",
|
| 61 |
+
action="store_true",
|
| 62 |
+
help="Include flags for both pybind11 and Python headers.",
|
| 63 |
+
)
|
| 64 |
+
parser.add_argument(
|
| 65 |
+
"--cmakedir",
|
| 66 |
+
action="store_true",
|
| 67 |
+
help="Print the CMake module directory, ideal for setting -Dpybind11_ROOT in CMake.",
|
| 68 |
+
)
|
| 69 |
+
parser.add_argument(
|
| 70 |
+
"--pkgconfigdir",
|
| 71 |
+
action="store_true",
|
| 72 |
+
help="Print the pkgconfig directory, ideal for setting $PKG_CONFIG_PATH.",
|
| 73 |
+
)
|
| 74 |
+
args = parser.parse_args()
|
| 75 |
+
if not sys.argv[1:]:
|
| 76 |
+
parser.print_help()
|
| 77 |
+
if args.includes:
|
| 78 |
+
print_includes()
|
| 79 |
+
if args.cmakedir:
|
| 80 |
+
print(quote(get_cmake_dir()))
|
| 81 |
+
if args.pkgconfigdir:
|
| 82 |
+
print(quote(get_pkgconfig_dir()))
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
if __name__ == "__main__":
|
| 86 |
+
main()
|
llava_video/lib/python3.10/site-packages/pybind11/commands.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
|
| 5 |
+
DIR = os.path.abspath(os.path.dirname(__file__))
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def get_include(user: bool = False) -> str: # noqa: ARG001
|
| 9 |
+
"""
|
| 10 |
+
Return the path to the pybind11 include directory. The historical "user"
|
| 11 |
+
argument is unused, and may be removed.
|
| 12 |
+
"""
|
| 13 |
+
installed_path = os.path.join(DIR, "include")
|
| 14 |
+
source_path = os.path.join(os.path.dirname(DIR), "include")
|
| 15 |
+
return installed_path if os.path.exists(installed_path) else source_path
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def get_cmake_dir() -> str:
|
| 19 |
+
"""
|
| 20 |
+
Return the path to the pybind11 CMake module directory.
|
| 21 |
+
"""
|
| 22 |
+
cmake_installed_path = os.path.join(DIR, "share", "cmake", "pybind11")
|
| 23 |
+
if os.path.exists(cmake_installed_path):
|
| 24 |
+
return cmake_installed_path
|
| 25 |
+
|
| 26 |
+
msg = "pybind11 not installed, installation required to access the CMake files"
|
| 27 |
+
raise ImportError(msg)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def get_pkgconfig_dir() -> str:
|
| 31 |
+
"""
|
| 32 |
+
Return the path to the pybind11 pkgconfig directory.
|
| 33 |
+
"""
|
| 34 |
+
pkgconfig_installed_path = os.path.join(DIR, "share", "pkgconfig")
|
| 35 |
+
if os.path.exists(pkgconfig_installed_path):
|
| 36 |
+
return pkgconfig_installed_path
|
| 37 |
+
|
| 38 |
+
msg = "pybind11 not installed, installation required to access the pkgconfig files"
|
| 39 |
+
raise ImportError(msg)
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_add_relu.h
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <optional>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_add_relu_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_add_relu.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> Tensor
|
| 26 |
+
inline at::Tensor _add_relu(const at::Tensor & self, const at::Tensor & other, const at::Scalar & alpha=1) {
|
| 27 |
+
return at::_ops::_add_relu_Tensor::call(self, other, alpha);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::_add_relu_.Tensor(Tensor(a!) self, Tensor other, *, Scalar alpha=1) -> Tensor(a!)
|
| 31 |
+
inline at::Tensor & _add_relu_(at::Tensor & self, const at::Tensor & other, const at::Scalar & alpha=1) {
|
| 32 |
+
return at::_ops::_add_relu__Tensor::call(self, other, alpha);
|
| 33 |
+
}
|
| 34 |
+
|
| 35 |
+
// aten::_add_relu.out(Tensor self, Tensor other, *, Scalar alpha=1, Tensor(a!) out) -> Tensor(a!)
|
| 36 |
+
inline at::Tensor & _add_relu_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & other, const at::Scalar & alpha=1) {
|
| 37 |
+
return at::_ops::_add_relu_out::call(self, other, alpha, out);
|
| 38 |
+
}
|
| 39 |
+
// aten::_add_relu.out(Tensor self, Tensor other, *, Scalar alpha=1, Tensor(a!) out) -> Tensor(a!)
|
| 40 |
+
inline at::Tensor & _add_relu_outf(const at::Tensor & self, const at::Tensor & other, const at::Scalar & alpha, at::Tensor & out) {
|
| 41 |
+
return at::_ops::_add_relu_out::call(self, other, alpha, out);
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
// aten::_add_relu.Scalar(Tensor self, Scalar other, Scalar alpha=1) -> Tensor
|
| 45 |
+
inline at::Tensor _add_relu(const at::Tensor & self, const at::Scalar & other, const at::Scalar & alpha=1) {
|
| 46 |
+
return at::_ops::_add_relu_Scalar::call(self, other, alpha);
|
| 47 |
+
}
|
| 48 |
+
|
| 49 |
+
// aten::_add_relu_.Scalar(Tensor(a!) self, Scalar other, Scalar alpha=1) -> Tensor(a!)
|
| 50 |
+
inline at::Tensor & _add_relu_(at::Tensor & self, const at::Scalar & other, const at::Scalar & alpha=1) {
|
| 51 |
+
return at::_ops::_add_relu__Scalar::call(self, other, alpha);
|
| 52 |
+
}
|
| 53 |
+
|
| 54 |
+
// aten::_add_relu.Scalar_out(Tensor self, Scalar other, Scalar alpha=1, *, Tensor(a!) out) -> Tensor(a!)
|
| 55 |
+
inline at::Tensor & _add_relu_out(at::Tensor & out, const at::Tensor & self, const at::Scalar & other, const at::Scalar & alpha=1) {
|
| 56 |
+
return at::_ops::_add_relu_Scalar_out::call(self, other, alpha, out);
|
| 57 |
+
}
|
| 58 |
+
// aten::_add_relu.Scalar_out(Tensor self, Scalar other, Scalar alpha=1, *, Tensor(a!) out) -> Tensor(a!)
|
| 59 |
+
inline at::Tensor & _add_relu_outf(const at::Tensor & self, const at::Scalar & other, const at::Scalar & alpha, at::Tensor & out) {
|
| 60 |
+
return at::_ops::_add_relu_Scalar_out::call(self, other, alpha, out);
|
| 61 |
+
}
|
| 62 |
+
|
| 63 |
+
}
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_amp_update_scale_cpu_dispatch.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cpu {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor & _amp_update_scale_(at::Tensor & self, at::Tensor & growth_tracker, const at::Tensor & found_inf, double scale_growth_factor, double scale_backoff_factor, int64_t growth_interval);
|
| 21 |
+
|
| 22 |
+
} // namespace cpu
|
| 23 |
+
} // namespace at
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_fake_quantize_learnable_per_channel_affine_backward_cuda_dispatch.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cuda {
|
| 19 |
+
|
| 20 |
+
TORCH_API ::std::tuple<at::Tensor,at::Tensor,at::Tensor> _fake_quantize_learnable_per_channel_affine_backward(const at::Tensor & grad, const at::Tensor & self, const at::Tensor & scale, const at::Tensor & zero_point, int64_t axis, int64_t quant_min, int64_t quant_max, double grad_factor=1.0);
|
| 21 |
+
|
| 22 |
+
} // namespace cuda
|
| 23 |
+
} // namespace at
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_fused_dropout_cuda_dispatch.h
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace cuda {
|
| 19 |
+
|
| 20 |
+
TORCH_API ::std::tuple<at::Tensor,at::Tensor> _fused_dropout(const at::Tensor & self, double p, ::std::optional<at::Generator> generator=::std::nullopt);
|
| 21 |
+
|
| 22 |
+
} // namespace cuda
|
| 23 |
+
} // namespace at
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_histogramdd_bin_edges_native.h
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API void _histogramdd_bin_edges_out(const at::Tensor & self, at::IntArrayRef bins, ::std::optional<at::ArrayRef<double>> range, const ::std::optional<at::Tensor> & weight, bool density, at::TensorList out);
|
| 20 |
+
TORCH_API ::std::vector<at::Tensor> histogramdd_bin_edges(const at::Tensor & self, at::IntArrayRef bins, ::std::optional<at::ArrayRef<double>> range=::std::nullopt, const ::std::optional<at::Tensor> & weight={}, bool density=false);
|
| 21 |
+
} // namespace native
|
| 22 |
+
} // namespace at
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_mps_convolution_native.h
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor & _mps_convolution_out_symint(const at::Tensor & self, const at::Tensor & weight, const ::std::optional<at::Tensor> & bias, c10::SymIntArrayRef padding, c10::SymIntArrayRef stride, c10::SymIntArrayRef dilation, c10::SymInt groups, at::Tensor & out);
|
| 20 |
+
} // namespace native
|
| 21 |
+
} // namespace at
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_native_batch_norm_legit_no_training_native.h
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API ::std::tuple<at::Tensor,at::Tensor,at::Tensor> _batch_norm_legit_no_training(const at::Tensor & input, const ::std::optional<at::Tensor> & weight, const ::std::optional<at::Tensor> & bias, const at::Tensor & running_mean, const at::Tensor & running_var, double momentum, double eps);
|
| 20 |
+
TORCH_API ::std::tuple<at::Tensor &,at::Tensor &,at::Tensor &> _native_batch_norm_legit_no_training_out(const at::Tensor & input, const ::std::optional<at::Tensor> & weight, const ::std::optional<at::Tensor> & bias, const at::Tensor & running_mean, const at::Tensor & running_var, double momentum, double eps, at::Tensor & out0, at::Tensor & out1, at::Tensor & out2);
|
| 21 |
+
} // namespace native
|
| 22 |
+
} // namespace at
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_safe_softmax.h
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <optional>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_safe_softmax_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_safe_softmax(Tensor self, int dim, ScalarType? dtype=None) -> Tensor
|
| 26 |
+
inline at::Tensor _safe_softmax(const at::Tensor & self, int64_t dim, ::std::optional<at::ScalarType> dtype=::std::nullopt) {
|
| 27 |
+
return at::_ops::_safe_softmax::call(self, dim, dtype);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
}
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_flash_attention_for_cpu_backward_ops.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _scaled_dot_product_flash_attention_for_cpu_backward {
|
| 18 |
+
using schema = ::std::tuple<at::Tensor,at::Tensor,at::Tensor> (const at::Tensor &, const at::Tensor &, const at::Tensor &, const at::Tensor &, const at::Tensor &, const at::Tensor &, double, bool, const ::std::optional<at::Tensor> &, ::std::optional<double>);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_scaled_dot_product_flash_attention_for_cpu_backward")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_scaled_dot_product_flash_attention_for_cpu_backward(Tensor grad_out, Tensor query, Tensor key, Tensor value, Tensor out, Tensor logsumexp, float dropout_p, bool is_causal, *, Tensor? attn_mask=None, float? scale=None) -> (Tensor grad_query, Tensor grad_key, Tensor grad_value)")
|
| 24 |
+
static ::std::tuple<at::Tensor,at::Tensor,at::Tensor> call(const at::Tensor & grad_out, const at::Tensor & query, const at::Tensor & key, const at::Tensor & value, const at::Tensor & out, const at::Tensor & logsumexp, double dropout_p, bool is_causal, const ::std::optional<at::Tensor> & attn_mask, ::std::optional<double> scale);
|
| 25 |
+
static ::std::tuple<at::Tensor,at::Tensor,at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & grad_out, const at::Tensor & query, const at::Tensor & key, const at::Tensor & value, const at::Tensor & out, const at::Tensor & logsumexp, double dropout_p, bool is_causal, const ::std::optional<at::Tensor> & attn_mask, ::std::optional<double> scale);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
}} // namespace at::_ops
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_scaled_dot_product_flash_attention_for_cpu_ops.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _scaled_dot_product_flash_attention_for_cpu {
|
| 18 |
+
using schema = ::std::tuple<at::Tensor,at::Tensor> (const at::Tensor &, const at::Tensor &, const at::Tensor &, double, bool, const ::std::optional<at::Tensor> &, ::std::optional<double>);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_scaled_dot_product_flash_attention_for_cpu")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_scaled_dot_product_flash_attention_for_cpu(Tensor query, Tensor key, Tensor value, float dropout_p=0.0, bool is_causal=False, *, Tensor? attn_mask=None, float? scale=None) -> (Tensor output, Tensor logsumexp)")
|
| 24 |
+
static ::std::tuple<at::Tensor,at::Tensor> call(const at::Tensor & query, const at::Tensor & key, const at::Tensor & value, double dropout_p, bool is_causal, const ::std::optional<at::Tensor> & attn_mask, ::std::optional<double> scale);
|
| 25 |
+
static ::std::tuple<at::Tensor,at::Tensor> redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & query, const at::Tensor & key, const at::Tensor & value, double dropout_p, bool is_causal, const ::std::optional<at::Tensor> & attn_mask, ::std::optional<double> scale);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
}} // namespace at::_ops
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_test_functorch_fallback_ops.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _test_functorch_fallback {
|
| 18 |
+
using schema = at::Tensor (const at::Tensor &, const at::Tensor &);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_test_functorch_fallback")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_test_functorch_fallback(Tensor self, Tensor other) -> Tensor")
|
| 24 |
+
static at::Tensor call(const at::Tensor & self, const at::Tensor & other);
|
| 25 |
+
static at::Tensor redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & other);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
struct TORCH_API _test_functorch_fallback_out {
|
| 29 |
+
using schema = at::Tensor & (const at::Tensor &, const at::Tensor &, at::Tensor &);
|
| 30 |
+
using ptr_schema = schema*;
|
| 31 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 32 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_test_functorch_fallback")
|
| 33 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "out")
|
| 34 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_test_functorch_fallback.out(Tensor self, Tensor other, *, Tensor(a!) out) -> Tensor(a!)")
|
| 35 |
+
static at::Tensor & call(const at::Tensor & self, const at::Tensor & other, at::Tensor & out);
|
| 36 |
+
static at::Tensor & redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & self, const at::Tensor & other, at::Tensor & out);
|
| 37 |
+
};
|
| 38 |
+
|
| 39 |
+
}} // namespace at::_ops
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_transform_bias_rescale_qkv.h
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <optional>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/_transform_bias_rescale_qkv_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::_transform_bias_rescale_qkv(Tensor qkv, Tensor qkv_bias, int num_heads) -> (Tensor, Tensor, Tensor)
|
| 26 |
+
inline ::std::tuple<at::Tensor,at::Tensor,at::Tensor> _transform_bias_rescale_qkv(const at::Tensor & qkv, const at::Tensor & qkv_bias, int64_t num_heads) {
|
| 27 |
+
return at::_ops::_transform_bias_rescale_qkv::call(qkv, qkv_bias, num_heads);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::_transform_bias_rescale_qkv.out(Tensor qkv, Tensor qkv_bias, int num_heads, *, Tensor(a!) out0, Tensor(b!) out1, Tensor(c!) out2) -> (Tensor(a!), Tensor(b!), Tensor(c!))
|
| 31 |
+
inline ::std::tuple<at::Tensor &,at::Tensor &,at::Tensor &> _transform_bias_rescale_qkv_out(at::Tensor & out0, at::Tensor & out1, at::Tensor & out2, const at::Tensor & qkv, const at::Tensor & qkv_bias, int64_t num_heads) {
|
| 32 |
+
return at::_ops::_transform_bias_rescale_qkv_out::call(qkv, qkv_bias, num_heads, out0, out1, out2);
|
| 33 |
+
}
|
| 34 |
+
// aten::_transform_bias_rescale_qkv.out(Tensor qkv, Tensor qkv_bias, int num_heads, *, Tensor(a!) out0, Tensor(b!) out1, Tensor(c!) out2) -> (Tensor(a!), Tensor(b!), Tensor(c!))
|
| 35 |
+
inline ::std::tuple<at::Tensor &,at::Tensor &,at::Tensor &> _transform_bias_rescale_qkv_outf(const at::Tensor & qkv, const at::Tensor & qkv_bias, int64_t num_heads, at::Tensor & out0, at::Tensor & out1, at::Tensor & out2) {
|
| 36 |
+
return at::_ops::_transform_bias_rescale_qkv_out::call(qkv, qkv_bias, num_heads, out0, out1, out2);
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
}
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/_validate_sparse_compressed_tensor_args_ops.h
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Operator.h
|
| 4 |
+
|
| 5 |
+
#include <tuple>
|
| 6 |
+
#include <vector>
|
| 7 |
+
|
| 8 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 9 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 10 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 11 |
+
#include <ATen/core/ATen_fwd.h>
|
| 12 |
+
|
| 13 |
+
namespace at {
|
| 14 |
+
namespace _ops {
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
struct TORCH_API _validate_sparse_compressed_tensor_args {
|
| 18 |
+
using schema = void (const at::Tensor &, const at::Tensor &, const at::Tensor &, at::IntArrayRef, at::Layout);
|
| 19 |
+
using ptr_schema = schema*;
|
| 20 |
+
// See Note [static constexpr char* members for windows NVCC]
|
| 21 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(name, "aten::_validate_sparse_compressed_tensor_args")
|
| 22 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(overload_name, "")
|
| 23 |
+
STATIC_CONSTEXPR_STR_INL_EXCEPT_WIN_CUDA(schema_str, "_validate_sparse_compressed_tensor_args(Tensor compressed_indices, Tensor plain_indices, Tensor values, int[] size, Layout layout) -> ()")
|
| 24 |
+
static void call(const at::Tensor & compressed_indices, const at::Tensor & plain_indices, const at::Tensor & values, at::IntArrayRef size, at::Layout layout);
|
| 25 |
+
static void redispatch(c10::DispatchKeySet dispatchKeySet, const at::Tensor & compressed_indices, const at::Tensor & plain_indices, const at::Tensor & values, at::IntArrayRef size, at::Layout layout);
|
| 26 |
+
};
|
| 27 |
+
|
| 28 |
+
}} // namespace at::_ops
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/bitwise_or_compositeexplicitautogradnonfunctional_dispatch.h
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautogradnonfunctional {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor bitwise_or(const at::Tensor & self, const at::Tensor & other);
|
| 21 |
+
TORCH_API at::Tensor & bitwise_or_(at::Tensor & self, const at::Tensor & other);
|
| 22 |
+
|
| 23 |
+
} // namespace compositeexplicitautogradnonfunctional
|
| 24 |
+
} // namespace at
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/ceil.h
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from Function.h
|
| 4 |
+
|
| 5 |
+
#include <ATen/Context.h>
|
| 6 |
+
#include <ATen/DeviceGuard.h>
|
| 7 |
+
#include <ATen/TensorUtils.h>
|
| 8 |
+
#include <ATen/TracerMode.h>
|
| 9 |
+
#include <ATen/core/Generator.h>
|
| 10 |
+
#include <ATen/core/Reduction.h>
|
| 11 |
+
#include <ATen/core/Tensor.h>
|
| 12 |
+
#include <c10/core/Scalar.h>
|
| 13 |
+
#include <c10/core/Storage.h>
|
| 14 |
+
#include <c10/core/TensorOptions.h>
|
| 15 |
+
#include <c10/util/Deprecated.h>
|
| 16 |
+
#include <optional>
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
#include <ATen/ops/ceil_ops.h>
|
| 21 |
+
|
| 22 |
+
namespace at {
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
// aten::ceil(Tensor self) -> Tensor
|
| 26 |
+
inline at::Tensor ceil(const at::Tensor & self) {
|
| 27 |
+
return at::_ops::ceil::call(self);
|
| 28 |
+
}
|
| 29 |
+
|
| 30 |
+
// aten::ceil_(Tensor(a!) self) -> Tensor(a!)
|
| 31 |
+
inline at::Tensor & ceil_(at::Tensor & self) {
|
| 32 |
+
return at::_ops::ceil_::call(self);
|
| 33 |
+
}
|
| 34 |
+
|
| 35 |
+
// aten::ceil.out(Tensor self, *, Tensor(a!) out) -> Tensor(a!)
|
| 36 |
+
inline at::Tensor & ceil_out(at::Tensor & out, const at::Tensor & self) {
|
| 37 |
+
return at::_ops::ceil_out::call(self, out);
|
| 38 |
+
}
|
| 39 |
+
// aten::ceil.out(Tensor self, *, Tensor(a!) out) -> Tensor(a!)
|
| 40 |
+
inline at::Tensor & ceil_outf(const at::Tensor & self, at::Tensor & out) {
|
| 41 |
+
return at::_ops::ceil_out::call(self, out);
|
| 42 |
+
}
|
| 43 |
+
|
| 44 |
+
}
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/conv_depthwise3d_compositeexplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeexplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor & conv_depthwise3d_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & weight, at::IntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, at::IntArrayRef stride, at::IntArrayRef padding, at::IntArrayRef dilation);
|
| 21 |
+
TORCH_API at::Tensor & conv_depthwise3d_outf(const at::Tensor & self, const at::Tensor & weight, at::IntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, at::IntArrayRef stride, at::IntArrayRef padding, at::IntArrayRef dilation, at::Tensor & out);
|
| 22 |
+
TORCH_API at::Tensor & conv_depthwise3d_symint_out(at::Tensor & out, const at::Tensor & self, const at::Tensor & weight, c10::SymIntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, c10::SymIntArrayRef stride, c10::SymIntArrayRef padding, c10::SymIntArrayRef dilation);
|
| 23 |
+
TORCH_API at::Tensor & conv_depthwise3d_symint_outf(const at::Tensor & self, const at::Tensor & weight, c10::SymIntArrayRef kernel_size, const ::std::optional<at::Tensor> & bias, c10::SymIntArrayRef stride, c10::SymIntArrayRef padding, c10::SymIntArrayRef dilation, at::Tensor & out);
|
| 24 |
+
|
| 25 |
+
} // namespace compositeexplicitautograd
|
| 26 |
+
} // namespace at
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/expand_as_native.h
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor expand_as(const at::Tensor & self, const at::Tensor & other);
|
| 20 |
+
} // namespace native
|
| 21 |
+
} // namespace at
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/eye_meta_dispatch.h
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace meta {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor & eye_out(at::Tensor & out, int64_t n);
|
| 21 |
+
TORCH_API at::Tensor & eye_outf(int64_t n, at::Tensor & out);
|
| 22 |
+
TORCH_API at::Tensor & eye_symint_out(at::Tensor & out, c10::SymInt n);
|
| 23 |
+
TORCH_API at::Tensor & eye_symint_outf(c10::SymInt n, at::Tensor & out);
|
| 24 |
+
TORCH_API at::Tensor & eye_out(at::Tensor & out, int64_t n, int64_t m);
|
| 25 |
+
TORCH_API at::Tensor & eye_outf(int64_t n, int64_t m, at::Tensor & out);
|
| 26 |
+
TORCH_API at::Tensor & eye_symint_out(at::Tensor & out, c10::SymInt n, c10::SymInt m);
|
| 27 |
+
TORCH_API at::Tensor & eye_symint_outf(c10::SymInt n, c10::SymInt m, at::Tensor & out);
|
| 28 |
+
|
| 29 |
+
} // namespace meta
|
| 30 |
+
} // namespace at
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/fft_fftshift_native.h
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
|
| 3 |
+
// @generated by torchgen/gen.py from NativeFunction.h
|
| 4 |
+
|
| 5 |
+
#include <c10/core/Scalar.h>
|
| 6 |
+
#include <c10/core/Storage.h>
|
| 7 |
+
#include <c10/core/TensorOptions.h>
|
| 8 |
+
#include <c10/util/Deprecated.h>
|
| 9 |
+
#include <optional>
|
| 10 |
+
#include <c10/core/QScheme.h>
|
| 11 |
+
#include <ATen/core/Reduction.h>
|
| 12 |
+
#include <ATen/core/Tensor.h>
|
| 13 |
+
#include <tuple>
|
| 14 |
+
#include <vector>
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
namespace at {
|
| 18 |
+
namespace native {
|
| 19 |
+
TORCH_API at::Tensor fft_fftshift(const at::Tensor & self, at::OptionalIntArrayRef dim=::std::nullopt);
|
| 20 |
+
} // namespace native
|
| 21 |
+
} // namespace at
|
pllava/lib/python3.10/site-packages/torch/include/ATen/ops/fix_compositeimplicitautograd_dispatch.h
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#pragma once
|
| 2 |
+
// @generated by torchgen/gen.py from DispatchKeyFunction.h
|
| 3 |
+
|
| 4 |
+
// NB: The implementing C++ file is RegisterDispatchKey.cpp
|
| 5 |
+
|
| 6 |
+
// The only #includes we need are for custom classes that have defaults in the C++ API
|
| 7 |
+
#include <c10/core/MemoryFormat.h>
|
| 8 |
+
#include <c10/core/Scalar.h>
|
| 9 |
+
#include <ATen/core/Reduction.h>
|
| 10 |
+
|
| 11 |
+
// Forward declarations of any types needed in the operator signatures.
|
| 12 |
+
// We can't directly include these classes because it will cause circular include dependencies.
|
| 13 |
+
// This file is included by TensorBody.h, which defines the Tensor class.
|
| 14 |
+
#include <ATen/core/ATen_fwd.h>
|
| 15 |
+
|
| 16 |
+
namespace at {
|
| 17 |
+
|
| 18 |
+
namespace compositeimplicitautograd {
|
| 19 |
+
|
| 20 |
+
TORCH_API at::Tensor fix(const at::Tensor & self);
|
| 21 |
+
TORCH_API at::Tensor & fix_out(at::Tensor & out, const at::Tensor & self);
|
| 22 |
+
TORCH_API at::Tensor & fix_outf(const at::Tensor & self, at::Tensor & out);
|
| 23 |
+
TORCH_API at::Tensor & fix_(at::Tensor & self);
|
| 24 |
+
|
| 25 |
+
} // namespace compositeimplicitautograd
|
| 26 |
+
} // namespace at
|