Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +2 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/__init__.py +1 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__init__.py +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__main__.py +5 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__pycache__/__init__.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/__init__.py +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/__pycache__/py38.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/py38.py +24 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools-10.3.0.dist-info/LICENSE +19 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-310.pyc +3 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/recipes.pyi +136 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/_setuptools_logging.py +26 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/bdist_wheel.py +595 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/macosx_libfile.py +469 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/util.py +26 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/__init__.py +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__init__.py +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/__init__.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_musllinux.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_parser.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_structures.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_tokenizer.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/markers.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/requirements.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/tags.cpython-310.pyc +0 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_elffile.py +108 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_manylinux.py +260 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_musllinux.py +83 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_parser.py +356 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_structures.py +61 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py +192 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/markers.py +253 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/requirements.py +90 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/specifiers.py +1011 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/tags.py +571 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/utils.py +172 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/version.py +561 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/vendor.txt +1 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/wheelfile.py +196 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/fixtures.py +157 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/script-with-bom.py +1 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_bdist_egg.py +73 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_build_meta.py +970 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_build_py.py +480 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_develop.py +175 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_editable_install.py +1289 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_glob.py +45 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_namespaces.py +138 -0
- mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/textwrap.py +6 -0
.gitattributes
CHANGED
|
@@ -539,3 +539,5 @@ mantis_evalkit/lib/python3.10/site-packages/sklearn/metrics/_pairwise_distances_
|
|
| 539 |
mantis_evalkit/lib/python3.10/site-packages/sklearn/metrics/_pairwise_distances_reduction/_radius_neighbors_classmode.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 540 |
parrot/lib/python3.10/site-packages/kiwisolver/_cext.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 541 |
mantis_evalkit/lib/python3.10/site-packages/sklearn/preprocessing/__pycache__/_data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
| 539 |
mantis_evalkit/lib/python3.10/site-packages/sklearn/metrics/_pairwise_distances_reduction/_radius_neighbors_classmode.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 540 |
parrot/lib/python3.10/site-packages/kiwisolver/_cext.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 541 |
mantis_evalkit/lib/python3.10/site-packages/sklearn/preprocessing/__pycache__/_data.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 542 |
+
moondream/lib/python3.10/site-packages/torch/utils/hipify/__pycache__/cuda_to_hip_mappings.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 543 |
+
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__init__.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__main__.py
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from . import main
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
if __name__ == '__main__':
|
| 5 |
+
main()
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (71.9 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/__init__.py
ADDED
|
File without changes
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/__pycache__/py38.cpython-310.pyc
ADDED
|
Binary file (759 Bytes). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/backports/tarfile/compat/py38.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
if sys.version_info < (3, 9):
|
| 5 |
+
|
| 6 |
+
def removesuffix(self, suffix):
|
| 7 |
+
# suffix='' should not call self[:-0].
|
| 8 |
+
if suffix and self.endswith(suffix):
|
| 9 |
+
return self[: -len(suffix)]
|
| 10 |
+
else:
|
| 11 |
+
return self[:]
|
| 12 |
+
|
| 13 |
+
def removeprefix(self, prefix):
|
| 14 |
+
if self.startswith(prefix):
|
| 15 |
+
return self[len(prefix) :]
|
| 16 |
+
else:
|
| 17 |
+
return self[:]
|
| 18 |
+
else:
|
| 19 |
+
|
| 20 |
+
def removesuffix(self, suffix):
|
| 21 |
+
return self.removesuffix(suffix)
|
| 22 |
+
|
| 23 |
+
def removeprefix(self, prefix):
|
| 24 |
+
return self.removeprefix(prefix)
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools-10.3.0.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) 2012 Erik Rose
|
| 2 |
+
|
| 3 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
| 4 |
+
this software and associated documentation files (the "Software"), to deal in
|
| 5 |
+
the Software without restriction, including without limitation the rights to
|
| 6 |
+
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
| 7 |
+
of the Software, and to permit persons to whom the Software is furnished to do
|
| 8 |
+
so, subject to the following conditions:
|
| 9 |
+
|
| 10 |
+
The above copyright notice and this permission notice shall be included in all
|
| 11 |
+
copies or substantial portions of the Software.
|
| 12 |
+
|
| 13 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 14 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 15 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 16 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 17 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 18 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 19 |
+
SOFTWARE.
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/__pycache__/more.cpython-310.pyc
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:a29978deee94c64619445fc042926f0fe515aafecb26d5e071b5df8adfcaa49d
|
| 3 |
+
size 138269
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/__pycache__/recipes.cpython-310.pyc
ADDED
|
Binary file (29.2 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/more_itertools/recipes.pyi
ADDED
|
@@ -0,0 +1,136 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Stubs for more_itertools.recipes"""
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
from typing import (
|
| 6 |
+
Any,
|
| 7 |
+
Callable,
|
| 8 |
+
Iterable,
|
| 9 |
+
Iterator,
|
| 10 |
+
overload,
|
| 11 |
+
Sequence,
|
| 12 |
+
Type,
|
| 13 |
+
TypeVar,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
# Type and type variable definitions
|
| 17 |
+
_T = TypeVar('_T')
|
| 18 |
+
_T1 = TypeVar('_T1')
|
| 19 |
+
_T2 = TypeVar('_T2')
|
| 20 |
+
_U = TypeVar('_U')
|
| 21 |
+
|
| 22 |
+
def take(n: int, iterable: Iterable[_T]) -> list[_T]: ...
|
| 23 |
+
def tabulate(
|
| 24 |
+
function: Callable[[int], _T], start: int = ...
|
| 25 |
+
) -> Iterator[_T]: ...
|
| 26 |
+
def tail(n: int, iterable: Iterable[_T]) -> Iterator[_T]: ...
|
| 27 |
+
def consume(iterator: Iterable[_T], n: int | None = ...) -> None: ...
|
| 28 |
+
@overload
|
| 29 |
+
def nth(iterable: Iterable[_T], n: int) -> _T | None: ...
|
| 30 |
+
@overload
|
| 31 |
+
def nth(iterable: Iterable[_T], n: int, default: _U) -> _T | _U: ...
|
| 32 |
+
def all_equal(
|
| 33 |
+
iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
|
| 34 |
+
) -> bool: ...
|
| 35 |
+
def quantify(
|
| 36 |
+
iterable: Iterable[_T], pred: Callable[[_T], bool] = ...
|
| 37 |
+
) -> int: ...
|
| 38 |
+
def pad_none(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
|
| 39 |
+
def padnone(iterable: Iterable[_T]) -> Iterator[_T | None]: ...
|
| 40 |
+
def ncycles(iterable: Iterable[_T], n: int) -> Iterator[_T]: ...
|
| 41 |
+
def dotproduct(vec1: Iterable[_T1], vec2: Iterable[_T2]) -> Any: ...
|
| 42 |
+
def flatten(listOfLists: Iterable[Iterable[_T]]) -> Iterator[_T]: ...
|
| 43 |
+
def repeatfunc(
|
| 44 |
+
func: Callable[..., _U], times: int | None = ..., *args: Any
|
| 45 |
+
) -> Iterator[_U]: ...
|
| 46 |
+
def pairwise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T]]: ...
|
| 47 |
+
def grouper(
|
| 48 |
+
iterable: Iterable[_T],
|
| 49 |
+
n: int,
|
| 50 |
+
incomplete: str = ...,
|
| 51 |
+
fillvalue: _U = ...,
|
| 52 |
+
) -> Iterator[tuple[_T | _U, ...]]: ...
|
| 53 |
+
def roundrobin(*iterables: Iterable[_T]) -> Iterator[_T]: ...
|
| 54 |
+
def partition(
|
| 55 |
+
pred: Callable[[_T], object] | None, iterable: Iterable[_T]
|
| 56 |
+
) -> tuple[Iterator[_T], Iterator[_T]]: ...
|
| 57 |
+
def powerset(iterable: Iterable[_T]) -> Iterator[tuple[_T, ...]]: ...
|
| 58 |
+
def unique_everseen(
|
| 59 |
+
iterable: Iterable[_T], key: Callable[[_T], _U] | None = ...
|
| 60 |
+
) -> Iterator[_T]: ...
|
| 61 |
+
def unique_justseen(
|
| 62 |
+
iterable: Iterable[_T], key: Callable[[_T], object] | None = ...
|
| 63 |
+
) -> Iterator[_T]: ...
|
| 64 |
+
def unique(
|
| 65 |
+
iterable: Iterable[_T],
|
| 66 |
+
key: Callable[[_T], object] | None = ...,
|
| 67 |
+
reverse: bool = False,
|
| 68 |
+
) -> Iterator[_T]: ...
|
| 69 |
+
@overload
|
| 70 |
+
def iter_except(
|
| 71 |
+
func: Callable[[], _T],
|
| 72 |
+
exception: Type[BaseException] | tuple[Type[BaseException], ...],
|
| 73 |
+
first: None = ...,
|
| 74 |
+
) -> Iterator[_T]: ...
|
| 75 |
+
@overload
|
| 76 |
+
def iter_except(
|
| 77 |
+
func: Callable[[], _T],
|
| 78 |
+
exception: Type[BaseException] | tuple[Type[BaseException], ...],
|
| 79 |
+
first: Callable[[], _U],
|
| 80 |
+
) -> Iterator[_T | _U]: ...
|
| 81 |
+
@overload
|
| 82 |
+
def first_true(
|
| 83 |
+
iterable: Iterable[_T], *, pred: Callable[[_T], object] | None = ...
|
| 84 |
+
) -> _T | None: ...
|
| 85 |
+
@overload
|
| 86 |
+
def first_true(
|
| 87 |
+
iterable: Iterable[_T],
|
| 88 |
+
default: _U,
|
| 89 |
+
pred: Callable[[_T], object] | None = ...,
|
| 90 |
+
) -> _T | _U: ...
|
| 91 |
+
def random_product(
|
| 92 |
+
*args: Iterable[_T], repeat: int = ...
|
| 93 |
+
) -> tuple[_T, ...]: ...
|
| 94 |
+
def random_permutation(
|
| 95 |
+
iterable: Iterable[_T], r: int | None = ...
|
| 96 |
+
) -> tuple[_T, ...]: ...
|
| 97 |
+
def random_combination(iterable: Iterable[_T], r: int) -> tuple[_T, ...]: ...
|
| 98 |
+
def random_combination_with_replacement(
|
| 99 |
+
iterable: Iterable[_T], r: int
|
| 100 |
+
) -> tuple[_T, ...]: ...
|
| 101 |
+
def nth_combination(
|
| 102 |
+
iterable: Iterable[_T], r: int, index: int
|
| 103 |
+
) -> tuple[_T, ...]: ...
|
| 104 |
+
def prepend(value: _T, iterator: Iterable[_U]) -> Iterator[_T | _U]: ...
|
| 105 |
+
def convolve(signal: Iterable[_T], kernel: Iterable[_T]) -> Iterator[_T]: ...
|
| 106 |
+
def before_and_after(
|
| 107 |
+
predicate: Callable[[_T], bool], it: Iterable[_T]
|
| 108 |
+
) -> tuple[Iterator[_T], Iterator[_T]]: ...
|
| 109 |
+
def triplewise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T, _T]]: ...
|
| 110 |
+
def sliding_window(
|
| 111 |
+
iterable: Iterable[_T], n: int
|
| 112 |
+
) -> Iterator[tuple[_T, ...]]: ...
|
| 113 |
+
def subslices(iterable: Iterable[_T]) -> Iterator[list[_T]]: ...
|
| 114 |
+
def polynomial_from_roots(roots: Sequence[_T]) -> list[_T]: ...
|
| 115 |
+
def iter_index(
|
| 116 |
+
iterable: Iterable[_T],
|
| 117 |
+
value: Any,
|
| 118 |
+
start: int | None = ...,
|
| 119 |
+
stop: int | None = ...,
|
| 120 |
+
) -> Iterator[int]: ...
|
| 121 |
+
def sieve(n: int) -> Iterator[int]: ...
|
| 122 |
+
def batched(
|
| 123 |
+
iterable: Iterable[_T], n: int, *, strict: bool = False
|
| 124 |
+
) -> Iterator[tuple[_T]]: ...
|
| 125 |
+
def transpose(
|
| 126 |
+
it: Iterable[Iterable[_T]],
|
| 127 |
+
) -> Iterator[tuple[_T, ...]]: ...
|
| 128 |
+
def reshape(
|
| 129 |
+
matrix: Iterable[Iterable[_T]], cols: int
|
| 130 |
+
) -> Iterator[tuple[_T, ...]]: ...
|
| 131 |
+
def matmul(m1: Sequence[_T], m2: Sequence[_T]) -> Iterator[tuple[_T]]: ...
|
| 132 |
+
def factor(n: int) -> Iterator[int]: ...
|
| 133 |
+
def polynomial_eval(coefficients: Sequence[_T], x: _U) -> _U: ...
|
| 134 |
+
def sum_of_squares(it: Iterable[_T]) -> _T: ...
|
| 135 |
+
def polynomial_derivative(coefficients: Sequence[_T]) -> list[_T]: ...
|
| 136 |
+
def totient(n: int) -> int: ...
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/_setuptools_logging.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# copied from setuptools.logging, omitting monkeypatching
|
| 2 |
+
from __future__ import annotations
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
import sys
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def _not_warning(record):
|
| 9 |
+
return record.levelno < logging.WARNING
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def configure():
|
| 13 |
+
"""
|
| 14 |
+
Configure logging to emit warning and above to stderr
|
| 15 |
+
and everything else to stdout. This behavior is provided
|
| 16 |
+
for compatibility with distutils.log but may change in
|
| 17 |
+
the future.
|
| 18 |
+
"""
|
| 19 |
+
err_handler = logging.StreamHandler()
|
| 20 |
+
err_handler.setLevel(logging.WARNING)
|
| 21 |
+
out_handler = logging.StreamHandler(sys.stdout)
|
| 22 |
+
out_handler.addFilter(_not_warning)
|
| 23 |
+
handlers = err_handler, out_handler
|
| 24 |
+
logging.basicConfig(
|
| 25 |
+
format="{message}", style="{", handlers=handlers, level=logging.DEBUG
|
| 26 |
+
)
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/bdist_wheel.py
ADDED
|
@@ -0,0 +1,595 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Create a wheel (.whl) distribution.
|
| 3 |
+
|
| 4 |
+
A wheel is a built archive format.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from __future__ import annotations
|
| 8 |
+
|
| 9 |
+
import os
|
| 10 |
+
import re
|
| 11 |
+
import shutil
|
| 12 |
+
import stat
|
| 13 |
+
import struct
|
| 14 |
+
import sys
|
| 15 |
+
import sysconfig
|
| 16 |
+
import warnings
|
| 17 |
+
from email.generator import BytesGenerator, Generator
|
| 18 |
+
from email.policy import EmailPolicy
|
| 19 |
+
from glob import iglob
|
| 20 |
+
from shutil import rmtree
|
| 21 |
+
from zipfile import ZIP_DEFLATED, ZIP_STORED
|
| 22 |
+
|
| 23 |
+
import setuptools
|
| 24 |
+
from setuptools import Command
|
| 25 |
+
|
| 26 |
+
from . import __version__ as wheel_version
|
| 27 |
+
from .macosx_libfile import calculate_macosx_platform_tag
|
| 28 |
+
from .metadata import pkginfo_to_metadata
|
| 29 |
+
from .util import log
|
| 30 |
+
from .vendored.packaging import tags
|
| 31 |
+
from .vendored.packaging import version as _packaging_version
|
| 32 |
+
from .wheelfile import WheelFile
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def safe_name(name):
|
| 36 |
+
"""Convert an arbitrary string to a standard distribution name
|
| 37 |
+
Any runs of non-alphanumeric/. characters are replaced with a single '-'.
|
| 38 |
+
"""
|
| 39 |
+
return re.sub("[^A-Za-z0-9.]+", "-", name)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
def safe_version(version):
|
| 43 |
+
"""
|
| 44 |
+
Convert an arbitrary string to a standard version string
|
| 45 |
+
"""
|
| 46 |
+
try:
|
| 47 |
+
# normalize the version
|
| 48 |
+
return str(_packaging_version.Version(version))
|
| 49 |
+
except _packaging_version.InvalidVersion:
|
| 50 |
+
version = version.replace(" ", ".")
|
| 51 |
+
return re.sub("[^A-Za-z0-9.]+", "-", version)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
setuptools_major_version = int(setuptools.__version__.split(".")[0])
|
| 55 |
+
|
| 56 |
+
PY_LIMITED_API_PATTERN = r"cp3\d"
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def _is_32bit_interpreter():
|
| 60 |
+
return struct.calcsize("P") == 4
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def python_tag():
|
| 64 |
+
return f"py{sys.version_info[0]}"
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def get_platform(archive_root):
|
| 68 |
+
"""Return our platform name 'win32', 'linux_x86_64'"""
|
| 69 |
+
result = sysconfig.get_platform()
|
| 70 |
+
if result.startswith("macosx") and archive_root is not None:
|
| 71 |
+
result = calculate_macosx_platform_tag(archive_root, result)
|
| 72 |
+
elif _is_32bit_interpreter():
|
| 73 |
+
if result == "linux-x86_64":
|
| 74 |
+
# pip pull request #3497
|
| 75 |
+
result = "linux-i686"
|
| 76 |
+
elif result == "linux-aarch64":
|
| 77 |
+
# packaging pull request #234
|
| 78 |
+
# TODO armv8l, packaging pull request #690 => this did not land
|
| 79 |
+
# in pip/packaging yet
|
| 80 |
+
result = "linux-armv7l"
|
| 81 |
+
|
| 82 |
+
return result.replace("-", "_")
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def get_flag(var, fallback, expected=True, warn=True):
|
| 86 |
+
"""Use a fallback value for determining SOABI flags if the needed config
|
| 87 |
+
var is unset or unavailable."""
|
| 88 |
+
val = sysconfig.get_config_var(var)
|
| 89 |
+
if val is None:
|
| 90 |
+
if warn:
|
| 91 |
+
warnings.warn(
|
| 92 |
+
f"Config variable '{var}' is unset, Python ABI tag may " "be incorrect",
|
| 93 |
+
RuntimeWarning,
|
| 94 |
+
stacklevel=2,
|
| 95 |
+
)
|
| 96 |
+
return fallback
|
| 97 |
+
return val == expected
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def get_abi_tag():
|
| 101 |
+
"""Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2)."""
|
| 102 |
+
soabi = sysconfig.get_config_var("SOABI")
|
| 103 |
+
impl = tags.interpreter_name()
|
| 104 |
+
if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
|
| 105 |
+
d = ""
|
| 106 |
+
m = ""
|
| 107 |
+
u = ""
|
| 108 |
+
if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")):
|
| 109 |
+
d = "d"
|
| 110 |
+
|
| 111 |
+
if get_flag(
|
| 112 |
+
"WITH_PYMALLOC",
|
| 113 |
+
impl == "cp",
|
| 114 |
+
warn=(impl == "cp" and sys.version_info < (3, 8)),
|
| 115 |
+
) and sys.version_info < (3, 8):
|
| 116 |
+
m = "m"
|
| 117 |
+
|
| 118 |
+
abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}"
|
| 119 |
+
elif soabi and impl == "cp" and soabi.startswith("cpython"):
|
| 120 |
+
# non-Windows
|
| 121 |
+
abi = "cp" + soabi.split("-")[1]
|
| 122 |
+
elif soabi and impl == "cp" and soabi.startswith("cp"):
|
| 123 |
+
# Windows
|
| 124 |
+
abi = soabi.split("-")[0]
|
| 125 |
+
elif soabi and impl == "pp":
|
| 126 |
+
# we want something like pypy36-pp73
|
| 127 |
+
abi = "-".join(soabi.split("-")[:2])
|
| 128 |
+
abi = abi.replace(".", "_").replace("-", "_")
|
| 129 |
+
elif soabi and impl == "graalpy":
|
| 130 |
+
abi = "-".join(soabi.split("-")[:3])
|
| 131 |
+
abi = abi.replace(".", "_").replace("-", "_")
|
| 132 |
+
elif soabi:
|
| 133 |
+
abi = soabi.replace(".", "_").replace("-", "_")
|
| 134 |
+
else:
|
| 135 |
+
abi = None
|
| 136 |
+
|
| 137 |
+
return abi
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def safer_name(name):
|
| 141 |
+
return safe_name(name).replace("-", "_")
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def safer_version(version):
|
| 145 |
+
return safe_version(version).replace("-", "_")
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def remove_readonly(func, path, excinfo):
|
| 149 |
+
remove_readonly_exc(func, path, excinfo[1])
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
def remove_readonly_exc(func, path, exc):
|
| 153 |
+
os.chmod(path, stat.S_IWRITE)
|
| 154 |
+
func(path)
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
class bdist_wheel(Command):
|
| 158 |
+
description = "create a wheel distribution"
|
| 159 |
+
|
| 160 |
+
supported_compressions = {
|
| 161 |
+
"stored": ZIP_STORED,
|
| 162 |
+
"deflated": ZIP_DEFLATED,
|
| 163 |
+
}
|
| 164 |
+
|
| 165 |
+
user_options = [
|
| 166 |
+
("bdist-dir=", "b", "temporary directory for creating the distribution"),
|
| 167 |
+
(
|
| 168 |
+
"plat-name=",
|
| 169 |
+
"p",
|
| 170 |
+
"platform name to embed in generated filenames "
|
| 171 |
+
"(default: %s)" % get_platform(None),
|
| 172 |
+
),
|
| 173 |
+
(
|
| 174 |
+
"keep-temp",
|
| 175 |
+
"k",
|
| 176 |
+
"keep the pseudo-installation tree around after "
|
| 177 |
+
"creating the distribution archive",
|
| 178 |
+
),
|
| 179 |
+
("dist-dir=", "d", "directory to put final built distributions in"),
|
| 180 |
+
("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
|
| 181 |
+
(
|
| 182 |
+
"relative",
|
| 183 |
+
None,
|
| 184 |
+
"build the archive using relative paths " "(default: false)",
|
| 185 |
+
),
|
| 186 |
+
(
|
| 187 |
+
"owner=",
|
| 188 |
+
"u",
|
| 189 |
+
"Owner name used when creating a tar file" " [default: current user]",
|
| 190 |
+
),
|
| 191 |
+
(
|
| 192 |
+
"group=",
|
| 193 |
+
"g",
|
| 194 |
+
"Group name used when creating a tar file" " [default: current group]",
|
| 195 |
+
),
|
| 196 |
+
("universal", None, "make a universal wheel" " (default: false)"),
|
| 197 |
+
(
|
| 198 |
+
"compression=",
|
| 199 |
+
None,
|
| 200 |
+
"zipfile compression (one of: {})" " (default: 'deflated')".format(
|
| 201 |
+
", ".join(supported_compressions)
|
| 202 |
+
),
|
| 203 |
+
),
|
| 204 |
+
(
|
| 205 |
+
"python-tag=",
|
| 206 |
+
None,
|
| 207 |
+
"Python implementation compatibility tag"
|
| 208 |
+
" (default: '%s')" % (python_tag()),
|
| 209 |
+
),
|
| 210 |
+
(
|
| 211 |
+
"build-number=",
|
| 212 |
+
None,
|
| 213 |
+
"Build number for this particular version. "
|
| 214 |
+
"As specified in PEP-0427, this must start with a digit. "
|
| 215 |
+
"[default: None]",
|
| 216 |
+
),
|
| 217 |
+
(
|
| 218 |
+
"py-limited-api=",
|
| 219 |
+
None,
|
| 220 |
+
"Python tag (cp32|cp33|cpNN) for abi3 wheel tag" " (default: false)",
|
| 221 |
+
),
|
| 222 |
+
]
|
| 223 |
+
|
| 224 |
+
boolean_options = ["keep-temp", "skip-build", "relative", "universal"]
|
| 225 |
+
|
| 226 |
+
def initialize_options(self):
|
| 227 |
+
self.bdist_dir = None
|
| 228 |
+
self.data_dir = None
|
| 229 |
+
self.plat_name = None
|
| 230 |
+
self.plat_tag = None
|
| 231 |
+
self.format = "zip"
|
| 232 |
+
self.keep_temp = False
|
| 233 |
+
self.dist_dir = None
|
| 234 |
+
self.egginfo_dir = None
|
| 235 |
+
self.root_is_pure = None
|
| 236 |
+
self.skip_build = None
|
| 237 |
+
self.relative = False
|
| 238 |
+
self.owner = None
|
| 239 |
+
self.group = None
|
| 240 |
+
self.universal = False
|
| 241 |
+
self.compression = "deflated"
|
| 242 |
+
self.python_tag = python_tag()
|
| 243 |
+
self.build_number = None
|
| 244 |
+
self.py_limited_api = False
|
| 245 |
+
self.plat_name_supplied = False
|
| 246 |
+
|
| 247 |
+
    def finalize_options(self):
        """Resolve all options left unset by initialize_options and validate
        the user-supplied ones (compression, py-limited-api, build-number)."""
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command("bdist").bdist_base
            self.bdist_dir = os.path.join(bdist_base, "wheel")

        egg_info = self.distribution.get_command_obj("egg_info")
        egg_info.ensure_finalized()  # needed for correct `wheel_dist_name`

        self.data_dir = self.wheel_dist_name + ".data"
        # Remember whether the user explicitly gave --plat-name; get_tag()
        # only honors it for pure-Python wheels when supplied.
        self.plat_name_supplied = self.plat_name is not None

        # Map the textual compression name to its zipfile constant.
        try:
            self.compression = self.supported_compressions[self.compression]
        except KeyError:
            raise ValueError(f"Unsupported compression: {self.compression}") from None

        need_options = ("dist_dir", "plat_name", "skip_build")

        self.set_undefined_options("bdist", *zip(need_options, need_options))

        # Pure unless there are compiled extension modules or C libraries.
        self.root_is_pure = not (
            self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
        )

        if self.py_limited_api and not re.match(
            PY_LIMITED_API_PATTERN, self.py_limited_api
        ):
            raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)

        # Support legacy [wheel] section for setting universal
        wheel = self.distribution.get_option_dict("wheel")
        if "universal" in wheel:
            # please don't define this in your global configs
            log.warning(
                "The [wheel] section is deprecated. Use [bdist_wheel] instead.",
            )
            val = wheel["universal"][1].strip()
            if val.lower() in ("1", "true", "yes"):
                self.universal = True

        # PEP 427: a build tag must start with a digit.
        if self.build_number is not None and not self.build_number[:1].isdigit():
            raise ValueError("Build tag (build-number) must start with a digit.")
|
| 289 |
+
|
| 290 |
+
@property
|
| 291 |
+
def wheel_dist_name(self):
|
| 292 |
+
"""Return distribution full name with - replaced with _"""
|
| 293 |
+
components = (
|
| 294 |
+
safer_name(self.distribution.get_name()),
|
| 295 |
+
safer_version(self.distribution.get_version()),
|
| 296 |
+
)
|
| 297 |
+
if self.build_number:
|
| 298 |
+
components += (self.build_number,)
|
| 299 |
+
return "-".join(components)
|
| 300 |
+
|
| 301 |
+
    def get_tag(self):
        """Return the (implementation, abi, platform) wheel tag triple,
        asserting that the interpreter can actually install it."""
        # bdist sets self.plat_name if unset, we should only use it for purepy
        # wheels if the user supplied it.
        if self.plat_name_supplied:
            plat_name = self.plat_name
        elif self.root_is_pure:
            plat_name = "any"
        else:
            # macosx contains system version in platform name so need special handle
            if self.plat_name and not self.plat_name.startswith("macosx"):
                plat_name = self.plat_name
            else:
                # on macosx always limit the platform name to comply with any
                # c-extension modules in bdist_dir, since the user can specify
                # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake

                # on other platforms, and on macosx if there are no c-extension
                # modules, use the default platform name.
                plat_name = get_platform(self.bdist_dir)

            # A 32-bit interpreter on a 64-bit kernel reports the 64-bit
            # platform; correct it to the 32-bit tag.
            if _is_32bit_interpreter():
                if plat_name in ("linux-x86_64", "linux_x86_64"):
                    plat_name = "linux_i686"
                if plat_name in ("linux-aarch64", "linux_aarch64"):
                    # TODO armv8l, packaging pull request #690 => this did not land
                    # in pip/packaging yet
                    plat_name = "linux_armv7l"

        # Normalize to the wheel-filename-safe form.
        plat_name = (
            plat_name.lower().replace("-", "_").replace(".", "_").replace(" ", "_")
        )

        if self.root_is_pure:
            if self.universal:
                impl = "py2.py3"
            else:
                impl = self.python_tag
            tag = (impl, "none", plat_name)
        else:
            impl_name = tags.interpreter_name()
            impl_ver = tags.interpreter_version()
            impl = impl_name + impl_ver
            # We don't work on CPython 3.1, 3.0.
            if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"):
                impl = self.py_limited_api
                abi_tag = "abi3"
            else:
                abi_tag = str(get_abi_tag()).lower()
            tag = (impl, abi_tag, plat_name)
            # issue gh-374: allow overriding plat_name
            supported_tags = [
                (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
            ]
            assert (
                tag in supported_tags
            ), f"would build wheel with unsupported tag {tag}"
        return tag
|
| 358 |
+
|
| 359 |
+
    def run(self):
        """Build the project, install it into a staging tree under
        ``self.bdist_dir``, convert the egg metadata to .dist-info, and zip
        the result into ``dist/<name>-<tags>.whl``."""
        build_scripts = self.reinitialize_command("build_scripts")
        build_scripts.executable = "python"
        build_scripts.force = True

        build_ext = self.reinitialize_command("build_ext")
        build_ext.inplace = False

        if not self.skip_build:
            self.run_command("build")

        # Install into the staging root with byte-compilation disabled.
        install = self.reinitialize_command("install", reinit_subcommands=True)
        install.root = self.bdist_dir
        install.compile = False
        install.skip_build = self.skip_build
        install.warn_dir = False

        # A wheel without setuptools scripts is more cross-platform.
        # Use the (undocumented) `no_ep` option to setuptools'
        # install_scripts command to avoid creating entry point scripts.
        install_scripts = self.reinitialize_command("install_scripts")
        install_scripts.no_ep = True

        # Use a custom scheme for the archive, because we have to decide
        # at installation time which scheme to use.
        for key in ("headers", "scripts", "data", "purelib", "platlib"):
            setattr(install, "install_" + key, os.path.join(self.data_dir, key))

        basedir_observed = ""

        if os.name == "nt":
            # win32 barfs if any of these are ''; could be '.'?
            # (distutils.command.install:change_roots bug)
            basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
            self.install_libbase = self.install_lib = basedir_observed

        # Route the package tree (pure or platform-specific) to the archive root.
        setattr(
            install,
            "install_purelib" if self.root_is_pure else "install_platlib",
            basedir_observed,
        )

        log.info(f"installing to {self.bdist_dir}")

        self.run_command("install")

        impl_tag, abi_tag, plat_tag = self.get_tag()
        archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}"
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            archive_root = os.path.join(
                self.bdist_dir, self._ensure_relative(install.install_base)
            )

        self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
        distinfo_dirname = (
            f"{safer_name(self.distribution.get_name())}-"
            f"{safer_version(self.distribution.get_version())}.dist-info"
        )
        distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
        self.egg2dist(self.egginfo_dir, distinfo_dir)

        self.write_wheelfile(distinfo_dir)

        # Make the archive
        if not os.path.exists(self.dist_dir):
            os.makedirs(self.dist_dir)

        wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
        with WheelFile(wheel_path, "w", self.compression) as wf:
            wf.write_files(archive_root)

        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, "dist_files", []).append(
            (
                "bdist_wheel",
                "{}.{}".format(*sys.version_info[:2]),  # like 3.7
                wheel_path,
            )
        )

        if not self.keep_temp:
            log.info(f"removing {self.bdist_dir}")
            if not self.dry_run:
                # shutil.rmtree's onerror= was deprecated in 3.12 for onexc=.
                if sys.version_info < (3, 12):
                    rmtree(self.bdist_dir, onerror=remove_readonly)
                else:
                    rmtree(self.bdist_dir, onexc=remove_readonly_exc)
|
| 448 |
+
|
| 449 |
+
    def write_wheelfile(
        self, wheelfile_base, generator="bdist_wheel (" + wheel_version + ")"
    ):
        """Write the PEP 427 ``WHEEL`` metadata file into *wheelfile_base*.

        :param wheelfile_base: directory (the .dist-info dir) to write into
        :param generator: value for the Generator header
        """
        from email.message import Message

        msg = Message()
        msg["Wheel-Version"] = "1.0"  # of the spec
        msg["Generator"] = generator
        msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
        if self.build_number is not None:
            msg["Build"] = self.build_number

        # Doesn't work for bdist_wininst
        impl_tag, abi_tag, plat_tag = self.get_tag()
        # Compressed tag sets ("py2.py3") expand to one Tag header per combination.
        for impl in impl_tag.split("."):
            for abi in abi_tag.split("."):
                for plat in plat_tag.split("."):
                    msg["Tag"] = "-".join((impl, abi, plat))

        wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
        log.info(f"creating {wheelfile_path}")
        with open(wheelfile_path, "wb") as f:
            BytesGenerator(f, maxheaderlen=0).flatten(msg)
|
| 472 |
+
|
| 473 |
+
def _ensure_relative(self, path):
|
| 474 |
+
# copied from dir_util, deleted
|
| 475 |
+
drive, path = os.path.splitdrive(path)
|
| 476 |
+
if path[0:1] == os.sep:
|
| 477 |
+
path = drive + path[1:]
|
| 478 |
+
return path
|
| 479 |
+
|
| 480 |
+
    @property
    def license_paths(self):
        """Return the license file paths to copy into the .dist-info directory,
        with behavior depending on the installed setuptools version."""
        if setuptools_major_version >= 57:
            # Setuptools has resolved any patterns to actual file names
            return self.distribution.metadata.license_files or ()

        files = set()
        metadata = self.distribution.get_option_dict("metadata")
        if setuptools_major_version >= 42:
            # Setuptools recognizes the license_files option but does not do globbing
            patterns = self.distribution.metadata.license_files
        else:
            # Prior to those, wheel is entirely responsible for handling license files
            if "license_files" in metadata:
                patterns = metadata["license_files"][1].split()
            else:
                patterns = ()

        if "license_file" in metadata:
            warnings.warn(
                'The "license_file" option is deprecated. Use "license_files" instead.',
                DeprecationWarning,
                stacklevel=2,
            )
            files.add(metadata["license_file"][1])

        # With nothing configured at all, fall back to conventional names.
        if not files and not patterns and not isinstance(patterns, list):
            patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")

        for pattern in patterns:
            for path in iglob(pattern):
                if path.endswith("~"):
                    log.debug(
                        f'ignoring license file "{path}" as it looks like a backup'
                    )
                    continue

                if path not in files and os.path.isfile(path):
                    log.info(
                        f'adding license file "{path}" (matched pattern "{pattern}")'
                    )
                    files.add(path)

        return files
|
| 524 |
+
|
| 525 |
+
    def egg2dist(self, egginfo_path, distinfo_path):
        """Convert an .egg-info directory into a .dist-info directory"""

        def adios(p):
            """Appropriately delete directory, file or link."""
            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
                shutil.rmtree(p)
            elif os.path.exists(p):
                os.unlink(p)

        # Start from a clean .dist-info directory.
        adios(distinfo_path)

        if not os.path.exists(egginfo_path):
            # There is no egg-info. This is probably because the egg-info
            # file/directory is not named matching the distribution name used
            # to name the archive file. Check for this case and report
            # accordingly.
            import glob

            pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
            possible = glob.glob(pat)
            err = f"Egg metadata expected at {egginfo_path} but not found"
            if possible:
                alt = os.path.basename(possible[0])
                err += f" ({alt} found - possible misnamed archive file?)"

            raise ValueError(err)

        if os.path.isfile(egginfo_path):
            # .egg-info is a single file
            pkginfo_path = egginfo_path
            pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
            os.mkdir(distinfo_path)
        else:
            # .egg-info is a directory
            pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
            pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)

            # ignore common egg metadata that is useless to wheel
            shutil.copytree(
                egginfo_path,
                distinfo_path,
                ignore=lambda x, y: {
                    "PKG-INFO",
                    "requires.txt",
                    "SOURCES.txt",
                    "not-zip-safe",
                },
            )

            # delete dependency_links if it is only whitespace
            dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
            with open(dependency_links_path, encoding="utf-8") as dependency_links_file:
                dependency_links = dependency_links_file.read().strip()
            if not dependency_links:
                adios(dependency_links_path)

        # Serialize PKG-INFO as the wheel METADATA file, without header
        # mangling or line-length folding.
        pkg_info_path = os.path.join(distinfo_path, "METADATA")
        serialization_policy = EmailPolicy(
            utf8=True,
            mangle_from_=False,
            max_line_length=0,
        )
        with open(pkg_info_path, "w", encoding="utf-8") as out:
            Generator(out, policy=serialization_policy).flatten(pkg_info)

        for license_path in self.license_paths:
            filename = os.path.basename(license_path)
            shutil.copy(license_path, os.path.join(distinfo_path, filename))

        # The staged egg-info is no longer needed once dist-info exists.
        adios(egginfo_path)
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/macosx_libfile.py
ADDED
|
@@ -0,0 +1,469 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This module contains function to analyse dynamic library
|
| 3 |
+
headers to extract system information
|
| 4 |
+
|
| 5 |
+
Currently only for MacOSX
|
| 6 |
+
|
| 7 |
+
Library file on macosx system starts with Mach-O or Fat field.
|
| 8 |
+
This can be distinguish by first 32 bites and it is called magic number.
|
| 9 |
+
Proper value of magic number is with suffix _MAGIC. Suffix _CIGAM means
|
| 10 |
+
reversed bytes order.
|
| 11 |
+
Both fields can occur in two types: 32 and 64 bytes.
|
| 12 |
+
|
| 13 |
+
FAT field inform that this library contains few version of library
|
| 14 |
+
(typically for different types version). It contains
|
| 15 |
+
information where Mach-O headers starts.
|
| 16 |
+
|
| 17 |
+
Each section started with Mach-O header contains one library
|
| 18 |
+
(So if file starts with this field it contains only one version).
|
| 19 |
+
|
| 20 |
+
After filed Mach-O there are section fields.
|
| 21 |
+
Each of them starts with two fields:
|
| 22 |
+
cmd - magic number for this command
|
| 23 |
+
cmdsize - total size occupied by this section information.
|
| 24 |
+
|
| 25 |
+
In this case only sections LC_VERSION_MIN_MACOSX (for macosx 10.13 and earlier)
|
| 26 |
+
and LC_BUILD_VERSION (for macosx 10.14 and newer) are interesting,
|
| 27 |
+
because them contains information about minimal system version.
|
| 28 |
+
|
| 29 |
+
Important remarks:
|
| 30 |
+
- For fat files this implementation looks for maximum number version.
|
| 31 |
+
It not check if it is 32 or 64 and do not compare it with currently built package.
|
| 32 |
+
So it is possible to false report higher version that needed.
|
| 33 |
+
- All structures signatures are taken form macosx header files.
|
| 34 |
+
- I think that binary format will be more stable than `otool` output.
|
| 35 |
+
and if apple introduce some changes both implementation will need to be updated.
|
| 36 |
+
- The system compile will set the deployment target no lower than
|
| 37 |
+
11.0 for arm64 builds. For "Universal 2" builds use the x86_64 deployment
|
| 38 |
+
target when the arm64 target is 11.0.
|
| 39 |
+
"""
|
| 40 |
+
|
| 41 |
+
from __future__ import annotations
|
| 42 |
+
|
| 43 |
+
import ctypes
|
| 44 |
+
import os
|
| 45 |
+
import sys
|
| 46 |
+
|
| 47 |
+
"""here the needed const and struct from mach-o header files"""
|
| 48 |
+
|
| 49 |
+
FAT_MAGIC = 0xCAFEBABE
|
| 50 |
+
FAT_CIGAM = 0xBEBAFECA
|
| 51 |
+
FAT_MAGIC_64 = 0xCAFEBABF
|
| 52 |
+
FAT_CIGAM_64 = 0xBFBAFECA
|
| 53 |
+
MH_MAGIC = 0xFEEDFACE
|
| 54 |
+
MH_CIGAM = 0xCEFAEDFE
|
| 55 |
+
MH_MAGIC_64 = 0xFEEDFACF
|
| 56 |
+
MH_CIGAM_64 = 0xCFFAEDFE
|
| 57 |
+
|
| 58 |
+
LC_VERSION_MIN_MACOSX = 0x24
|
| 59 |
+
LC_BUILD_VERSION = 0x32
|
| 60 |
+
|
| 61 |
+
CPU_TYPE_ARM64 = 0x0100000C
|
| 62 |
+
|
| 63 |
+
mach_header_fields = [
|
| 64 |
+
("magic", ctypes.c_uint32),
|
| 65 |
+
("cputype", ctypes.c_int),
|
| 66 |
+
("cpusubtype", ctypes.c_int),
|
| 67 |
+
("filetype", ctypes.c_uint32),
|
| 68 |
+
("ncmds", ctypes.c_uint32),
|
| 69 |
+
("sizeofcmds", ctypes.c_uint32),
|
| 70 |
+
("flags", ctypes.c_uint32),
|
| 71 |
+
]
|
| 72 |
+
"""
|
| 73 |
+
struct mach_header {
|
| 74 |
+
uint32_t magic; /* mach magic number identifier */
|
| 75 |
+
cpu_type_t cputype; /* cpu specifier */
|
| 76 |
+
cpu_subtype_t cpusubtype; /* machine specifier */
|
| 77 |
+
uint32_t filetype; /* type of file */
|
| 78 |
+
uint32_t ncmds; /* number of load commands */
|
| 79 |
+
uint32_t sizeofcmds; /* the size of all the load commands */
|
| 80 |
+
uint32_t flags; /* flags */
|
| 81 |
+
};
|
| 82 |
+
typedef integer_t cpu_type_t;
|
| 83 |
+
typedef integer_t cpu_subtype_t;
|
| 84 |
+
"""
|
| 85 |
+
|
| 86 |
+
mach_header_fields_64 = mach_header_fields + [("reserved", ctypes.c_uint32)]
|
| 87 |
+
"""
|
| 88 |
+
struct mach_header_64 {
|
| 89 |
+
uint32_t magic; /* mach magic number identifier */
|
| 90 |
+
cpu_type_t cputype; /* cpu specifier */
|
| 91 |
+
cpu_subtype_t cpusubtype; /* machine specifier */
|
| 92 |
+
uint32_t filetype; /* type of file */
|
| 93 |
+
uint32_t ncmds; /* number of load commands */
|
| 94 |
+
uint32_t sizeofcmds; /* the size of all the load commands */
|
| 95 |
+
uint32_t flags; /* flags */
|
| 96 |
+
uint32_t reserved; /* reserved */
|
| 97 |
+
};
|
| 98 |
+
"""
|
| 99 |
+
|
| 100 |
+
fat_header_fields = [("magic", ctypes.c_uint32), ("nfat_arch", ctypes.c_uint32)]
|
| 101 |
+
"""
|
| 102 |
+
struct fat_header {
|
| 103 |
+
uint32_t magic; /* FAT_MAGIC or FAT_MAGIC_64 */
|
| 104 |
+
uint32_t nfat_arch; /* number of structs that follow */
|
| 105 |
+
};
|
| 106 |
+
"""
|
| 107 |
+
|
| 108 |
+
fat_arch_fields = [
|
| 109 |
+
("cputype", ctypes.c_int),
|
| 110 |
+
("cpusubtype", ctypes.c_int),
|
| 111 |
+
("offset", ctypes.c_uint32),
|
| 112 |
+
("size", ctypes.c_uint32),
|
| 113 |
+
("align", ctypes.c_uint32),
|
| 114 |
+
]
|
| 115 |
+
"""
|
| 116 |
+
struct fat_arch {
|
| 117 |
+
cpu_type_t cputype; /* cpu specifier (int) */
|
| 118 |
+
cpu_subtype_t cpusubtype; /* machine specifier (int) */
|
| 119 |
+
uint32_t offset; /* file offset to this object file */
|
| 120 |
+
uint32_t size; /* size of this object file */
|
| 121 |
+
uint32_t align; /* alignment as a power of 2 */
|
| 122 |
+
};
|
| 123 |
+
"""
|
| 124 |
+
|
| 125 |
+
fat_arch_64_fields = [
|
| 126 |
+
("cputype", ctypes.c_int),
|
| 127 |
+
("cpusubtype", ctypes.c_int),
|
| 128 |
+
("offset", ctypes.c_uint64),
|
| 129 |
+
("size", ctypes.c_uint64),
|
| 130 |
+
("align", ctypes.c_uint32),
|
| 131 |
+
("reserved", ctypes.c_uint32),
|
| 132 |
+
]
|
| 133 |
+
"""
|
| 134 |
+
struct fat_arch_64 {
|
| 135 |
+
cpu_type_t cputype; /* cpu specifier (int) */
|
| 136 |
+
cpu_subtype_t cpusubtype; /* machine specifier (int) */
|
| 137 |
+
uint64_t offset; /* file offset to this object file */
|
| 138 |
+
uint64_t size; /* size of this object file */
|
| 139 |
+
uint32_t align; /* alignment as a power of 2 */
|
| 140 |
+
uint32_t reserved; /* reserved */
|
| 141 |
+
};
|
| 142 |
+
"""
|
| 143 |
+
|
| 144 |
+
segment_base_fields = [("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32)]
|
| 145 |
+
"""base for reading segment info"""
|
| 146 |
+
|
| 147 |
+
segment_command_fields = [
|
| 148 |
+
("cmd", ctypes.c_uint32),
|
| 149 |
+
("cmdsize", ctypes.c_uint32),
|
| 150 |
+
("segname", ctypes.c_char * 16),
|
| 151 |
+
("vmaddr", ctypes.c_uint32),
|
| 152 |
+
("vmsize", ctypes.c_uint32),
|
| 153 |
+
("fileoff", ctypes.c_uint32),
|
| 154 |
+
("filesize", ctypes.c_uint32),
|
| 155 |
+
("maxprot", ctypes.c_int),
|
| 156 |
+
("initprot", ctypes.c_int),
|
| 157 |
+
("nsects", ctypes.c_uint32),
|
| 158 |
+
("flags", ctypes.c_uint32),
|
| 159 |
+
]
|
| 160 |
+
"""
|
| 161 |
+
struct segment_command { /* for 32-bit architectures */
|
| 162 |
+
uint32_t cmd; /* LC_SEGMENT */
|
| 163 |
+
uint32_t cmdsize; /* includes sizeof section structs */
|
| 164 |
+
char segname[16]; /* segment name */
|
| 165 |
+
uint32_t vmaddr; /* memory address of this segment */
|
| 166 |
+
uint32_t vmsize; /* memory size of this segment */
|
| 167 |
+
uint32_t fileoff; /* file offset of this segment */
|
| 168 |
+
uint32_t filesize; /* amount to map from the file */
|
| 169 |
+
vm_prot_t maxprot; /* maximum VM protection */
|
| 170 |
+
vm_prot_t initprot; /* initial VM protection */
|
| 171 |
+
uint32_t nsects; /* number of sections in segment */
|
| 172 |
+
uint32_t flags; /* flags */
|
| 173 |
+
};
|
| 174 |
+
typedef int vm_prot_t;
|
| 175 |
+
"""
|
| 176 |
+
|
| 177 |
+
segment_command_fields_64 = [
|
| 178 |
+
("cmd", ctypes.c_uint32),
|
| 179 |
+
("cmdsize", ctypes.c_uint32),
|
| 180 |
+
("segname", ctypes.c_char * 16),
|
| 181 |
+
("vmaddr", ctypes.c_uint64),
|
| 182 |
+
("vmsize", ctypes.c_uint64),
|
| 183 |
+
("fileoff", ctypes.c_uint64),
|
| 184 |
+
("filesize", ctypes.c_uint64),
|
| 185 |
+
("maxprot", ctypes.c_int),
|
| 186 |
+
("initprot", ctypes.c_int),
|
| 187 |
+
("nsects", ctypes.c_uint32),
|
| 188 |
+
("flags", ctypes.c_uint32),
|
| 189 |
+
]
|
| 190 |
+
"""
|
| 191 |
+
struct segment_command_64 { /* for 64-bit architectures */
|
| 192 |
+
uint32_t cmd; /* LC_SEGMENT_64 */
|
| 193 |
+
uint32_t cmdsize; /* includes sizeof section_64 structs */
|
| 194 |
+
char segname[16]; /* segment name */
|
| 195 |
+
uint64_t vmaddr; /* memory address of this segment */
|
| 196 |
+
uint64_t vmsize; /* memory size of this segment */
|
| 197 |
+
uint64_t fileoff; /* file offset of this segment */
|
| 198 |
+
uint64_t filesize; /* amount to map from the file */
|
| 199 |
+
vm_prot_t maxprot; /* maximum VM protection */
|
| 200 |
+
vm_prot_t initprot; /* initial VM protection */
|
| 201 |
+
uint32_t nsects; /* number of sections in segment */
|
| 202 |
+
uint32_t flags; /* flags */
|
| 203 |
+
};
|
| 204 |
+
"""
|
| 205 |
+
|
| 206 |
+
version_min_command_fields = segment_base_fields + [
|
| 207 |
+
("version", ctypes.c_uint32),
|
| 208 |
+
("sdk", ctypes.c_uint32),
|
| 209 |
+
]
|
| 210 |
+
"""
|
| 211 |
+
struct version_min_command {
|
| 212 |
+
uint32_t cmd; /* LC_VERSION_MIN_MACOSX or
|
| 213 |
+
LC_VERSION_MIN_IPHONEOS or
|
| 214 |
+
LC_VERSION_MIN_WATCHOS or
|
| 215 |
+
LC_VERSION_MIN_TVOS */
|
| 216 |
+
uint32_t cmdsize; /* sizeof(struct min_version_command) */
|
| 217 |
+
uint32_t version; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
|
| 218 |
+
uint32_t sdk; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
|
| 219 |
+
};
|
| 220 |
+
"""
|
| 221 |
+
|
| 222 |
+
build_version_command_fields = segment_base_fields + [
|
| 223 |
+
("platform", ctypes.c_uint32),
|
| 224 |
+
("minos", ctypes.c_uint32),
|
| 225 |
+
("sdk", ctypes.c_uint32),
|
| 226 |
+
("ntools", ctypes.c_uint32),
|
| 227 |
+
]
|
| 228 |
+
"""
|
| 229 |
+
struct build_version_command {
|
| 230 |
+
uint32_t cmd; /* LC_BUILD_VERSION */
|
| 231 |
+
uint32_t cmdsize; /* sizeof(struct build_version_command) plus */
|
| 232 |
+
/* ntools * sizeof(struct build_tool_version) */
|
| 233 |
+
uint32_t platform; /* platform */
|
| 234 |
+
uint32_t minos; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
|
| 235 |
+
uint32_t sdk; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
|
| 236 |
+
uint32_t ntools; /* number of tool entries following this */
|
| 237 |
+
};
|
| 238 |
+
"""
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
def swap32(x):
    """Return the 32-bit value *x* with its byte order reversed."""
    b0 = (x >> 24) & 0xFF
    b1 = (x >> 16) & 0xFF
    b2 = (x >> 8) & 0xFF
    b3 = x & 0xFF
    return (b3 << 24) | (b2 << 16) | (b1 << 8) | b0
|
| 248 |
+
|
| 249 |
+
|
| 250 |
+
def get_base_class_and_magic_number(lib_file, seek=None):
    """Peek at the 32-bit magic number at *seek* (default: current position).

    Returns a (ctypes base class, normalized magic number) pair; the base
    class byte-swaps when the file's byte order differs from the host's.
    The file position is restored to *seek* before returning.
    """
    if seek is None:
        seek = lib_file.tell()
    else:
        lib_file.seek(seek)

    raw = lib_file.read(ctypes.sizeof(ctypes.c_uint32))
    magic_number = ctypes.c_uint32.from_buffer_copy(raw).value

    # A "cigam" (reversed) magic means the binary uses the opposite byte
    # order, so pick the byte-swapping structure base and normalize the magic.
    if magic_number in (FAT_CIGAM, FAT_CIGAM_64, MH_CIGAM, MH_CIGAM_64):
        if sys.byteorder == "little":
            base_class = ctypes.BigEndianStructure
        else:
            base_class = ctypes.LittleEndianStructure

        magic_number = swap32(magic_number)
    else:
        base_class = ctypes.Structure

    lib_file.seek(seek)
    return base_class, magic_number
|
| 272 |
+
|
| 273 |
+
|
| 274 |
+
def read_data(struct_class, lib_file):
    """Read exactly sizeof(*struct_class*) bytes from *lib_file* and parse
    them into a *struct_class* instance."""
    raw = lib_file.read(ctypes.sizeof(struct_class))
    return struct_class.from_buffer_copy(raw)
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
def extract_macosx_min_system_version(path_to_lib):
    """Return the minimal macOS version required by the library at
    *path_to_lib* as an (x, y, z) tuple, or None if it cannot be determined.

    For fat (multi-arch) binaries the maximum version across the contained
    architectures is returned.
    """
    with open(path_to_lib, "rb") as lib_file:
        BaseClass, magic_number = get_base_class_and_magic_number(lib_file, 0)
        if magic_number not in [FAT_MAGIC, FAT_MAGIC_64, MH_MAGIC, MH_MAGIC_64]:
            return

        # Bug fix: the magic number has already been normalized (CIGAM forms
        # are byte-swapped to MAGIC forms above), so the 64-bit fat case must
        # be matched with FAT_MAGIC_64 — the previous FAT_CIGAM_64 test could
        # never be true, sending 64-bit fat binaries down the thin-binary path.
        if magic_number in [FAT_MAGIC, FAT_MAGIC_64]:

            class FatHeader(BaseClass):
                _fields_ = fat_header_fields

            fat_header = read_data(FatHeader, lib_file)
            if magic_number == FAT_MAGIC:

                class FatArch(BaseClass):
                    _fields_ = fat_arch_fields

            else:

                class FatArch(BaseClass):
                    _fields_ = fat_arch_64_fields

            fat_arch_list = [
                read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch)
            ]

            versions_list = []
            for el in fat_arch_list:
                try:
                    version = read_mach_header(lib_file, el.offset)
                    if version is not None:
                        if el.cputype == CPU_TYPE_ARM64 and len(fat_arch_list) != 1:
                            # Xcode will not set the deployment target below 11.0.0
                            # for the arm64 architecture. Ignore the arm64 deployment
                            # in fat binaries when the target is 11.0.0, that way
                            # the other architectures can select a lower deployment
                            # target.
                            # This is safe because there is no arm64 variant for
                            # macOS 10.15 or earlier.
                            if version == (11, 0, 0):
                                continue
                        versions_list.append(version)
                except ValueError:
                    pass

            if len(versions_list) > 0:
                return max(versions_list)
            else:
                return None

        else:
            try:
                return read_mach_header(lib_file, 0)
            except ValueError:
                # Malformed load commands: treat as "version unknown".
                return None
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
def read_mach_header(lib_file, seek=None):
    """
    This function parses a Mach-O header and extracts
    information about the minimal macOS version.

    :param lib_file: reference to opened library file with pointer
    :returns: (x, y, z) version tuple, or None if no version load command
        is present
    """
    base_class, magic_number = get_base_class_and_magic_number(lib_file, seek)
    arch = "32" if magic_number == MH_MAGIC else "64"

    class SegmentBase(base_class):
        _fields_ = segment_base_fields

    if arch == "32":

        class MachHeader(base_class):
            _fields_ = mach_header_fields

    else:

        class MachHeader(base_class):
            _fields_ = mach_header_fields_64

    mach_header = read_data(MachHeader, lib_file)
    # Walk the load commands, peeking at each (cmd, cmdsize) prefix.
    for _i in range(mach_header.ncmds):
        pos = lib_file.tell()
        segment_base = read_data(SegmentBase, lib_file)
        lib_file.seek(pos)  # rewind so the full command can be re-read
        if segment_base.cmd == LC_VERSION_MIN_MACOSX:
            # macOS 10.13 and earlier encode the target in version_min_command.

            class VersionMinCommand(base_class):
                _fields_ = version_min_command_fields

            version_info = read_data(VersionMinCommand, lib_file)
            return parse_version(version_info.version)
        elif segment_base.cmd == LC_BUILD_VERSION:
            # macOS 10.14+ use build_version_command's minos field instead.

            class VersionBuild(base_class):
                _fields_ = build_version_command_fields

            version_info = read_data(VersionBuild, lib_file)
            return parse_version(version_info.minos)
        else:
            # Skip over commands we don't care about.
            lib_file.seek(pos + segment_base.cmdsize)
            continue
|
| 381 |
+
|
| 382 |
+
|
| 383 |
+
def parse_version(version):
    """Unpack a Mach-O ``0xXXXXYYZZ`` version word into ``(x, y, z)``.

    The major version occupies the high 16 bits, the minor version the
    next 8 bits, and the patch level the low 8 bits.
    """
    major = (version >> 16) & 0xFFFF
    minor = (version >> 8) & 0xFF
    patch = version & 0xFF
    return major, minor, patch
|
| 388 |
+
|
| 389 |
+
|
| 390 |
+
def calculate_macosx_platform_tag(archive_root, platform_tag):
    """
    Calculate proper macosx platform tag basing on files which are included to wheel

    Example platform tag `macosx-10.14-x86_64`

    :param archive_root: directory tree scanned for ``.dylib``/``.so`` files
    :param platform_tag: base tag of the form ``macosx-<version>-<arch>``
    :return: tag of the form ``macosx_<major>_<minor>_<arch>`` raised to the
        highest deployment target required by any bundled library
    """
    prefix, base_version, suffix = platform_tag.split("-")
    base_version = tuple(int(x) for x in base_version.split("."))
    base_version = base_version[:2]
    if base_version[0] > 10:
        # From macOS 11 on, only the major version is significant for tags.
        base_version = (base_version[0], 0)
    assert len(base_version) == 2
    if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
        deploy_target = tuple(
            int(x) for x in os.environ["MACOSX_DEPLOYMENT_TARGET"].split(".")
        )
        deploy_target = deploy_target[:2]
        if deploy_target[0] > 10:
            deploy_target = (deploy_target[0], 0)
        if deploy_target < base_version:
            # The interpreter itself already requires base_version; a lower
            # deployment target cannot be honored.
            sys.stderr.write(
                "[WARNING] MACOSX_DEPLOYMENT_TARGET is set to a lower value ({}) than "
                "the version on which the Python interpreter was compiled ({}), and "
                "will be ignored.\n".format(
                    ".".join(str(x) for x in deploy_target),
                    ".".join(str(x) for x in base_version),
                )
            )
        else:
            base_version = deploy_target

    assert len(base_version) == 2
    start_version = base_version
    # Map of library path -> minimal macOS version it requires.
    versions_dict = {}
    for dirpath, _dirnames, filenames in os.walk(archive_root):
        for filename in filenames:
            if filename.endswith((".dylib", ".so")):
                lib_path = os.path.join(dirpath, filename)
                min_ver = extract_macosx_min_system_version(lib_path)
                if min_ver is not None:
                    min_ver = min_ver[0:2]
                    if min_ver[0] > 10:
                        min_ver = (min_ver[0], 0)
                    versions_dict[lib_path] = min_ver

    if len(versions_dict) > 0:
        base_version = max(base_version, max(versions_dict.values()))

    # macosx platform tag do not support minor bugfix release
    fin_base_version = "_".join([str(x) for x in base_version])
    if start_version < base_version:
        problematic_files = [k for k, v in versions_dict.items() if v > start_version]
        # BUG FIX: choose singular/plural wording from the number of files.
        # Previously this check ran after "\n".join(), so it measured the
        # length of the joined *string* and "this file" was effectively
        # unreachable for any real path name.
        if len(problematic_files) == 1:
            files_form = "this file"
        else:
            files_form = "these files"
        problematic_files = "\n".join(problematic_files)
        error_message = (
            "[WARNING] This wheel needs a higher macOS version than {} "
            "To silence this warning, set MACOSX_DEPLOYMENT_TARGET to at least "
            + fin_base_version
            + " or recreate "
            + files_form
            + " with lower "
            "MACOSX_DEPLOYMENT_TARGET: \n" + problematic_files
        )

        if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
            error_message = error_message.format(
                "is set in MACOSX_DEPLOYMENT_TARGET variable."
            )
        else:
            error_message = error_message.format(
                "the version your Python interpreter is compiled against."
            )

        sys.stderr.write(error_message)

    platform_tag = prefix + "_" + fin_base_version + "_" + suffix
    return platform_tag
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/util.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import base64
|
| 4 |
+
import logging
|
| 5 |
+
|
| 6 |
+
log = logging.getLogger("wheel")
|
| 7 |
+
|
| 8 |
+
# ensure Python logging is configured
|
| 9 |
+
try:
|
| 10 |
+
__import__("setuptools.logging")
|
| 11 |
+
except ImportError:
|
| 12 |
+
# setuptools < ??
|
| 13 |
+
from . import _setuptools_logging
|
| 14 |
+
|
| 15 |
+
_setuptools_logging.configure()
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def urlsafe_b64encode(data: bytes) -> bytes:
    """Encode *data* with the URL-safe base64 alphabet, dropping ``=`` padding."""
    encoded = base64.urlsafe_b64encode(data)
    return encoded.rstrip(b"=")
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def urlsafe_b64decode(data: bytes) -> bytes:
    """Decode unpadded URL-safe base64 by restoring the ``=`` padding first."""
    # Same padding formula as the original: 4 - (len % 4), kept verbatim so
    # the multiple-of-4 edge case behaves identically.
    padding = b"=" * (4 - (len(data) & 3))
    return base64.urlsafe_b64decode(data + padding)
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/__init__.py
ADDED
|
File without changes
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__init__.py
ADDED
|
File without changes
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (177 Bytes). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_musllinux.cpython-310.pyc
ADDED
|
Binary file (3.33 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_parser.cpython-310.pyc
ADDED
|
Binary file (8.95 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_structures.cpython-310.pyc
ADDED
|
Binary file (2.69 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/_tokenizer.cpython-310.pyc
ADDED
|
Binary file (5.81 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/markers.cpython-310.pyc
ADDED
|
Binary file (6.9 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/requirements.cpython-310.pyc
ADDED
|
Binary file (2.82 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/__pycache__/tags.cpython-310.pyc
ADDED
|
Binary file (13.8 kB). View file
|
|
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_elffile.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
ELF file parser.
|
| 3 |
+
|
| 4 |
+
This provides a class ``ELFFile`` that parses an ELF executable in a similar
|
| 5 |
+
interface to ``ZipFile``. Only the read interface is implemented.
|
| 6 |
+
|
| 7 |
+
Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
|
| 8 |
+
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import enum
|
| 12 |
+
import os
|
| 13 |
+
import struct
|
| 14 |
+
from typing import IO, Optional, Tuple
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class ELFInvalid(ValueError):
    """Raised when a file cannot be parsed as a valid ELF executable."""

    pass
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class EIClass(enum.IntEnum):
    """ELF ``EI_CLASS`` values: 32-bit vs 64-bit object format."""

    C32 = 1
    C64 = 2
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class EIData(enum.IntEnum):
    """ELF ``EI_DATA`` values: byte order of the object file."""

    Lsb = 1  # Little-endian.
    Msb = 2  # Big-endian.
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class EMachine(enum.IntEnum):
    """ELF ``e_machine`` values for the architectures this module inspects."""

    I386 = 3
    S390 = 22
    Arm = 40
    X8664 = 62
    # NOTE(review): looks like a typo for "AArch64", but the name is kept
    # as-is for compatibility with existing callers.
    AArc64 = 183
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class ELFFile:
    """
    Representation of an ELF executable.

    Parses just enough of the header to expose the machine type, the
    processor-specific flags, and the ``PT_INTERP`` interpreter path.
    """

    def __init__(self, f: IO[bytes]) -> None:
        self._f = f

        try:
            ident = self._read("16B")
        except struct.error:
            raise ELFInvalid("unable to parse identification")
        magic = bytes(ident[:4])
        if magic != b"\x7fELF":
            raise ELFInvalid(f"invalid magic: {magic!r}")

        self.capacity = ident[4]  # Format for program header (bitness).
        self.encoding = ident[5]  # Data structure encoding (endianness).

        try:
            # e_fmt: Format for program header.
            # p_fmt: Format for section header.
            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
            e_fmt, self._p_fmt, self._p_idx = {
                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
            }[(self.capacity, self.encoding)]
        except KeyError:
            raise ELFInvalid(
                f"unrecognized capacity ({self.capacity}) or "
                f"encoding ({self.encoding})"
            )

        try:
            (
                _,
                self.machine,  # Architecture type.
                _,
                _,
                self._e_phoff,  # Offset of program header.
                _,
                self.flags,  # Processor-specific flags.
                _,
                self._e_phentsize,  # Size of section.
                self._e_phnum,  # Number of sections.
            ) = self._read(e_fmt)
        except struct.error as e:
            raise ELFInvalid("unable to parse machine and section information") from e

    def _read(self, fmt: str) -> Tuple[int, ...]:
        # Read exactly the bytes *fmt* describes and unpack them in one step.
        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))

    @property
    def interpreter(self) -> Optional[str]:
        """
        The path recorded in the ``PT_INTERP`` section header.
        """
        for index in range(self._e_phnum):
            self._f.seek(self._e_phoff + self._e_phentsize * index)
            try:
                data = self._read(self._p_fmt)
            except struct.error:
                # Truncated or corrupt entry; skip it rather than fail.
                continue
            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
                continue
            self._f.seek(data[self._p_idx[1]])
            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
        return None
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_manylinux.py
ADDED
|
@@ -0,0 +1,260 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import contextlib
|
| 3 |
+
import functools
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
import sys
|
| 7 |
+
import warnings
|
| 8 |
+
from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
|
| 9 |
+
|
| 10 |
+
from ._elffile import EIClass, EIData, ELFFile, EMachine
|
| 11 |
+
|
| 12 |
+
EF_ARM_ABIMASK = 0xFF000000
|
| 13 |
+
EF_ARM_ABI_VER5 = 0x05000000
|
| 14 |
+
EF_ARM_ABI_FLOAT_HARD = 0x00000400
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
|
| 18 |
+
# as the type for `path` until then.
|
| 19 |
+
@contextlib.contextmanager
|
| 20 |
+
def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
|
| 21 |
+
try:
|
| 22 |
+
with open(path, "rb") as f:
|
| 23 |
+
yield ELFFile(f)
|
| 24 |
+
except (OSError, TypeError, ValueError):
|
| 25 |
+
yield None
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def _is_linux_armhf(executable: str) -> bool:
    """Return True if *executable* is a 32-bit LSB ARM ELF built for the hard-float ABI."""
    # hard-float ABI can be detected from the ELF header of the running
    # process
    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    with _parse_elf(executable) as f:
        return (
            f is not None
            and f.capacity == EIClass.C32
            and f.encoding == EIData.Lsb
            and f.machine == EMachine.Arm
            and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
            and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
        )
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def _is_linux_i686(executable: str) -> bool:
    """Return True if *executable* is a 32-bit little-endian x86 ELF binary."""
    with _parse_elf(executable) as f:
        return (
            f is not None
            and f.capacity == EIClass.C32
            and f.encoding == EIData.Lsb
            and f.machine == EMachine.I386
        )
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
|
| 54 |
+
if "armv7l" in archs:
|
| 55 |
+
return _is_linux_armhf(executable)
|
| 56 |
+
if "i686" in archs:
|
| 57 |
+
return _is_linux_i686(executable)
|
| 58 |
+
allowed_archs = {
|
| 59 |
+
"x86_64",
|
| 60 |
+
"aarch64",
|
| 61 |
+
"ppc64",
|
| 62 |
+
"ppc64le",
|
| 63 |
+
"s390x",
|
| 64 |
+
"loongarch64",
|
| 65 |
+
"riscv64",
|
| 66 |
+
}
|
| 67 |
+
return any(arch in allowed_archs for arch in archs)
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
# If glibc ever changes its major version, we need to know what the last
# minor version was, so we can build the complete list of all versions.
# For now, guess what the highest minor version might be, assume it will
# be 50 for testing. Once this actually happens, update the dictionary
# with the actual value.
# Maps glibc major version -> last known minor version of that series.
_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class _GLibCVersion(NamedTuple):
    """A glibc version as a tuple-comparable (major, minor) pair."""

    major: int
    minor: int
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def _glibc_version_string_confstr() -> Optional[str]:
    """
    Primary implementation of glibc_version_string using os.confstr.
    """
    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
    # to be broken or missing. This strategy is used in the standard library
    # platform module.
    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
    try:
        # Should be a string like "glibc 2.17".
        version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION")
        assert version_string is not None
        _, version = version_string.rsplit()
    except (AssertionError, AttributeError, OSError, ValueError):
        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
        # ValueError also covers a string with no space to rsplit on.
        return None
    return version
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def _glibc_version_string_ctypes() -> Optional[str]:
    """
    Fallback implementation of glibc_version_string using ctypes.
    """
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # We must also handle the special case where the executable is not a
    # dynamically linked executable. This can occur when using musl libc,
    # for example. In this situation, dlopen() will error, leading to an
    # OSError. Interestingly, at least in the case of musl, there is no
    # errno set on the OSError. The single string argument used to construct
    # OSError comes from libc itself and is therefore not portable to
    # hard code here. In any case, failure to call dlopen() means we
    # can proceed, so we bail on our attempt.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        return None

    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str: str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def _glibc_version_string() -> Optional[str]:
    """Returns glibc version string, or None if not using glibc."""
    # Try the fast confstr path first, then fall back to ctypes.
    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
|
| 152 |
+
"""Parse glibc version.
|
| 153 |
+
|
| 154 |
+
We use a regexp instead of str.split because we want to discard any
|
| 155 |
+
random junk that might come after the minor version -- this might happen
|
| 156 |
+
in patched/forked versions of glibc (e.g. Linaro's version of glibc
|
| 157 |
+
uses version strings like "2.20-2014.11"). See gh-3588.
|
| 158 |
+
"""
|
| 159 |
+
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
|
| 160 |
+
if not m:
|
| 161 |
+
warnings.warn(
|
| 162 |
+
f"Expected glibc version with 2 components major.minor,"
|
| 163 |
+
f" got: {version_str}",
|
| 164 |
+
RuntimeWarning,
|
| 165 |
+
)
|
| 166 |
+
return -1, -1
|
| 167 |
+
return int(m.group("major")), int(m.group("minor"))
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
@functools.lru_cache
def _get_glibc_version() -> Tuple[int, int]:
    """Return the running glibc version as (major, minor), or (-1, -1) when
    the version cannot be determined (e.g. not linked against glibc)."""
    version_str = _glibc_version_string()
    if version_str is None:
        return (-1, -1)
    return _parse_glibc_version(version_str)
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
# From PEP 513, PEP 600
def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
    """Return True if the system glibc supports manylinux *version* for *arch*.

    An importable ``_manylinux`` override module, if present, may veto or
    force compatibility (PEP 513 / PEP 600 escape hatch).
    """
    sys_glibc = _get_glibc_version()
    if sys_glibc < version:
        return False
    # Check for presence of _manylinux module.
    try:
        import _manylinux
    except ImportError:
        return True
    if hasattr(_manylinux, "manylinux_compatible"):
        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
        if result is not None:
            return bool(result)
        # None from the hook means "no opinion": fall back to compatible.
        return True
    # Legacy per-tag boolean attributes (pre-PEP 600 override protocol).
    if version == _GLibCVersion(2, 5):
        if hasattr(_manylinux, "manylinux1_compatible"):
            return bool(_manylinux.manylinux1_compatible)
    if version == _GLibCVersion(2, 12):
        if hasattr(_manylinux, "manylinux2010_compatible"):
            return bool(_manylinux.manylinux2010_compatible)
    if version == _GLibCVersion(2, 17):
        if hasattr(_manylinux, "manylinux2014_compatible"):
            return bool(_manylinux.manylinux2014_compatible)
    return True
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
# Maps glibc (major, minor) -> legacy pre-PEP-600 manylinux tag name.
_LEGACY_MANYLINUX_MAP = {
    # CentOS 7 w/ glibc 2.17 (PEP 599)
    (2, 17): "manylinux2014",
    # CentOS 6 w/ glibc 2.12 (PEP 571)
    (2, 12): "manylinux2010",
    # CentOS 5 w/ glibc 2.5 (PEP 513)
    (2, 5): "manylinux1",
}
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate manylinux tags compatible to the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be manylinux-compatible.

    :returns: An iterator of compatible manylinux tags.
    """
    if not _have_compatible_abi(sys.executable, archs):
        return
    # Oldest glibc to be supported regardless of architecture is (2, 17).
    too_old_glibc2 = _GLibCVersion(2, 16)
    if set(archs) & {"x86_64", "i686"}:
        # On x86/i686 also oldest glibc to be supported is (2, 5).
        too_old_glibc2 = _GLibCVersion(2, 4)
    current_glibc = _GLibCVersion(*_get_glibc_version())
    glibc_max_list = [current_glibc]
    # We can assume compatibility across glibc major versions.
    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
    #
    # Build a list of maximum glibc versions so that we can
    # output the canonical list of all glibc from current_glibc
    # down to too_old_glibc2, including all intermediary versions.
    for glibc_major in range(current_glibc.major - 1, 1, -1):
        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
    for arch in archs:
        for glibc_max in glibc_max_list:
            if glibc_max.major == too_old_glibc2.major:
                min_minor = too_old_glibc2.minor
            else:
                # For other glibc major versions oldest supported is (x, 0).
                min_minor = -1
            for glibc_minor in range(glibc_max.minor, min_minor, -1):
                glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
                tag = "manylinux_{}_{}".format(*glibc_version)
                if _is_compatible(arch, glibc_version):
                    yield f"{tag}_{arch}"
                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
                if glibc_version in _LEGACY_MANYLINUX_MAP:
                    legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
                    if _is_compatible(arch, glibc_version):
                        yield f"{legacy_tag}_{arch}"
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_musllinux.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""PEP 656 support.
|
| 2 |
+
|
| 3 |
+
This module implements logic to detect if the currently running Python is
|
| 4 |
+
linked against musl, and what musl version is used.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import functools
|
| 8 |
+
import re
|
| 9 |
+
import subprocess
|
| 10 |
+
import sys
|
| 11 |
+
from typing import Iterator, NamedTuple, Optional, Sequence
|
| 12 |
+
|
| 13 |
+
from ._elffile import ELFFile
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class _MuslVersion(NamedTuple):
    """A musl libc version as a comparable (major, minor) pair."""

    major: int
    minor: int
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
    """Extract a ``_MuslVersion`` from the loader's stderr banner.

    Returns None when the banner does not look like musl's, i.e. when it has
    fewer than two non-blank lines, the first does not start with "musl", or
    the second carries no "Version X.Y" string.
    """
    stripped = (raw.strip() for raw in output.splitlines())
    lines = [line for line in stripped if line]
    if len(lines) < 2 or lines[0][:4] != "musl":
        return None
    match = re.match(r"Version (\d+)\.(\d+)", lines[1])
    if match is None:
        return None
    return _MuslVersion(major=int(match.group(1)), minor=int(match.group(2)))
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
@functools.lru_cache
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
    """Detect currently-running musl runtime version.

    This is done by checking the specified executable's dynamic linking
    information, and invoking the loader to parse its output for a version
    string. If the loader is musl, the output would be something like::

        musl libc (x86_64)
        Version 1.2.2
        Dynamic Program Loader
    """
    try:
        with open(executable, "rb") as f:
            ld = ELFFile(f).interpreter
    except (OSError, TypeError, ValueError):
        # Unreadable, non-file, or non-ELF executable: not musl.
        return None
    if ld is None or "musl" not in ld:
        return None
    # Running the musl loader with no arguments prints its banner to stderr.
    proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
    return _parse_musl_version(proc.stderr)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate musllinux tags compatible to the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be musllinux-compatible.

    :returns: An iterator of compatible musllinux tags.
    """
    sys_musl = _get_musl_version(sys.executable)
    if sys_musl is None:  # Python not dynamically linked against musl.
        return
    for arch in archs:
        # Yield tags from the current musl minor version down to 0.
        for minor in range(sys_musl.minor, -1, -1):
            yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
if __name__ == "__main__":  # pragma: no cover
    # Manual smoke test: print the detected musl version and resulting tags.
    import sysconfig

    plat = sysconfig.get_platform()
    assert plat.startswith("linux-"), "not linux"

    print("plat:", plat)
    print("musl:", _get_musl_version(sys.executable))
    print("tags:", end=" ")
    # BUG FIX: platform_tags() takes a sequence of architecture strings.
    # Passing the bare str iterated it character by character, producing
    # one bogus tag per character of the architecture name.
    for t in platform_tags([re.sub(r"[.-]", "_", plat.split("-", 1)[-1])]):
        print(t, end="\n ")
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_parser.py
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Handwritten parser of dependency specifiers.
|
| 2 |
+
|
| 3 |
+
The docstring for each __parse_* function contains EBNF-inspired grammar representing
|
| 4 |
+
the implementation.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import ast
|
| 8 |
+
from typing import Any, List, NamedTuple, Optional, Tuple, Union
|
| 9 |
+
|
| 10 |
+
from ._tokenizer import DEFAULT_RULES, Tokenizer
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class Node:
    """Base class for marker AST nodes; wraps a single string value."""

    def __init__(self, value: str) -> None:
        self.value = value

    def __str__(self) -> str:
        return self.value

    def __repr__(self) -> str:
        return f"<{type(self).__name__}('{self}')>"

    def serialize(self) -> str:
        """Render this node back to specifier syntax (subclass hook)."""
        raise NotImplementedError
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class Variable(Node):
    """An environment-marker variable (e.g. ``python_version``); serializes bare."""

    def serialize(self) -> str:
        return str(self)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class Value(Node):
|
| 33 |
+
def serialize(self) -> str:
|
| 34 |
+
return f'"{self}"'
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class Op(Node):
|
| 38 |
+
def serialize(self) -> str:
|
| 39 |
+
return str(self)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
MarkerVar = Union[Variable, Value]
|
| 43 |
+
MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
|
| 44 |
+
# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
|
| 45 |
+
# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
|
| 46 |
+
# mypy does not support recursive type definition
|
| 47 |
+
# https://github.com/python/mypy/issues/731
|
| 48 |
+
MarkerAtom = Any
|
| 49 |
+
MarkerList = List[Any]
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class ParsedRequirement(NamedTuple):
|
| 53 |
+
name: str
|
| 54 |
+
url: str
|
| 55 |
+
extras: List[str]
|
| 56 |
+
specifier: str
|
| 57 |
+
marker: Optional[MarkerList]
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
# --------------------------------------------------------------------------------------
|
| 61 |
+
# Recursive descent parser for dependency specifier
|
| 62 |
+
# --------------------------------------------------------------------------------------
|
| 63 |
+
def parse_requirement(source: str) -> ParsedRequirement:
|
| 64 |
+
return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
|
| 68 |
+
"""
|
| 69 |
+
requirement = WS? IDENTIFIER WS? extras WS? requirement_details
|
| 70 |
+
"""
|
| 71 |
+
tokenizer.consume("WS")
|
| 72 |
+
|
| 73 |
+
name_token = tokenizer.expect(
|
| 74 |
+
"IDENTIFIER", expected="package name at the start of dependency specifier"
|
| 75 |
+
)
|
| 76 |
+
name = name_token.text
|
| 77 |
+
tokenizer.consume("WS")
|
| 78 |
+
|
| 79 |
+
extras = _parse_extras(tokenizer)
|
| 80 |
+
tokenizer.consume("WS")
|
| 81 |
+
|
| 82 |
+
url, specifier, marker = _parse_requirement_details(tokenizer)
|
| 83 |
+
tokenizer.expect("END", expected="end of dependency specifier")
|
| 84 |
+
|
| 85 |
+
return ParsedRequirement(name, url, extras, specifier, marker)
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def _parse_requirement_details(
|
| 89 |
+
tokenizer: Tokenizer,
|
| 90 |
+
) -> Tuple[str, str, Optional[MarkerList]]:
|
| 91 |
+
"""
|
| 92 |
+
requirement_details = AT URL (WS requirement_marker?)?
|
| 93 |
+
| specifier WS? (requirement_marker)?
|
| 94 |
+
"""
|
| 95 |
+
|
| 96 |
+
specifier = ""
|
| 97 |
+
url = ""
|
| 98 |
+
marker = None
|
| 99 |
+
|
| 100 |
+
if tokenizer.check("AT"):
|
| 101 |
+
tokenizer.read()
|
| 102 |
+
tokenizer.consume("WS")
|
| 103 |
+
|
| 104 |
+
url_start = tokenizer.position
|
| 105 |
+
url = tokenizer.expect("URL", expected="URL after @").text
|
| 106 |
+
if tokenizer.check("END", peek=True):
|
| 107 |
+
return (url, specifier, marker)
|
| 108 |
+
|
| 109 |
+
tokenizer.expect("WS", expected="whitespace after URL")
|
| 110 |
+
|
| 111 |
+
# The input might end after whitespace.
|
| 112 |
+
if tokenizer.check("END", peek=True):
|
| 113 |
+
return (url, specifier, marker)
|
| 114 |
+
|
| 115 |
+
marker = _parse_requirement_marker(
|
| 116 |
+
tokenizer, span_start=url_start, after="URL and whitespace"
|
| 117 |
+
)
|
| 118 |
+
else:
|
| 119 |
+
specifier_start = tokenizer.position
|
| 120 |
+
specifier = _parse_specifier(tokenizer)
|
| 121 |
+
tokenizer.consume("WS")
|
| 122 |
+
|
| 123 |
+
if tokenizer.check("END", peek=True):
|
| 124 |
+
return (url, specifier, marker)
|
| 125 |
+
|
| 126 |
+
marker = _parse_requirement_marker(
|
| 127 |
+
tokenizer,
|
| 128 |
+
span_start=specifier_start,
|
| 129 |
+
after=(
|
| 130 |
+
"version specifier"
|
| 131 |
+
if specifier
|
| 132 |
+
else "name and no valid version specifier"
|
| 133 |
+
),
|
| 134 |
+
)
|
| 135 |
+
|
| 136 |
+
return (url, specifier, marker)
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def _parse_requirement_marker(
|
| 140 |
+
tokenizer: Tokenizer, *, span_start: int, after: str
|
| 141 |
+
) -> MarkerList:
|
| 142 |
+
"""
|
| 143 |
+
requirement_marker = SEMICOLON marker WS?
|
| 144 |
+
"""
|
| 145 |
+
|
| 146 |
+
if not tokenizer.check("SEMICOLON"):
|
| 147 |
+
tokenizer.raise_syntax_error(
|
| 148 |
+
f"Expected end or semicolon (after {after})",
|
| 149 |
+
span_start=span_start,
|
| 150 |
+
)
|
| 151 |
+
tokenizer.read()
|
| 152 |
+
|
| 153 |
+
marker = _parse_marker(tokenizer)
|
| 154 |
+
tokenizer.consume("WS")
|
| 155 |
+
|
| 156 |
+
return marker
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
def _parse_extras(tokenizer: Tokenizer) -> List[str]:
|
| 160 |
+
"""
|
| 161 |
+
extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
|
| 162 |
+
"""
|
| 163 |
+
if not tokenizer.check("LEFT_BRACKET", peek=True):
|
| 164 |
+
return []
|
| 165 |
+
|
| 166 |
+
with tokenizer.enclosing_tokens(
|
| 167 |
+
"LEFT_BRACKET",
|
| 168 |
+
"RIGHT_BRACKET",
|
| 169 |
+
around="extras",
|
| 170 |
+
):
|
| 171 |
+
tokenizer.consume("WS")
|
| 172 |
+
extras = _parse_extras_list(tokenizer)
|
| 173 |
+
tokenizer.consume("WS")
|
| 174 |
+
|
| 175 |
+
return extras
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
|
| 179 |
+
"""
|
| 180 |
+
extras_list = identifier (wsp* ',' wsp* identifier)*
|
| 181 |
+
"""
|
| 182 |
+
extras: List[str] = []
|
| 183 |
+
|
| 184 |
+
if not tokenizer.check("IDENTIFIER"):
|
| 185 |
+
return extras
|
| 186 |
+
|
| 187 |
+
extras.append(tokenizer.read().text)
|
| 188 |
+
|
| 189 |
+
while True:
|
| 190 |
+
tokenizer.consume("WS")
|
| 191 |
+
if tokenizer.check("IDENTIFIER", peek=True):
|
| 192 |
+
tokenizer.raise_syntax_error("Expected comma between extra names")
|
| 193 |
+
elif not tokenizer.check("COMMA"):
|
| 194 |
+
break
|
| 195 |
+
|
| 196 |
+
tokenizer.read()
|
| 197 |
+
tokenizer.consume("WS")
|
| 198 |
+
|
| 199 |
+
extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
|
| 200 |
+
extras.append(extra_token.text)
|
| 201 |
+
|
| 202 |
+
return extras
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
def _parse_specifier(tokenizer: Tokenizer) -> str:
|
| 206 |
+
"""
|
| 207 |
+
specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
|
| 208 |
+
| WS? version_many WS?
|
| 209 |
+
"""
|
| 210 |
+
with tokenizer.enclosing_tokens(
|
| 211 |
+
"LEFT_PARENTHESIS",
|
| 212 |
+
"RIGHT_PARENTHESIS",
|
| 213 |
+
around="version specifier",
|
| 214 |
+
):
|
| 215 |
+
tokenizer.consume("WS")
|
| 216 |
+
parsed_specifiers = _parse_version_many(tokenizer)
|
| 217 |
+
tokenizer.consume("WS")
|
| 218 |
+
|
| 219 |
+
return parsed_specifiers
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
def _parse_version_many(tokenizer: Tokenizer) -> str:
|
| 223 |
+
"""
|
| 224 |
+
version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?
|
| 225 |
+
"""
|
| 226 |
+
parsed_specifiers = ""
|
| 227 |
+
while tokenizer.check("SPECIFIER"):
|
| 228 |
+
span_start = tokenizer.position
|
| 229 |
+
parsed_specifiers += tokenizer.read().text
|
| 230 |
+
if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
|
| 231 |
+
tokenizer.raise_syntax_error(
|
| 232 |
+
".* suffix can only be used with `==` or `!=` operators",
|
| 233 |
+
span_start=span_start,
|
| 234 |
+
span_end=tokenizer.position + 1,
|
| 235 |
+
)
|
| 236 |
+
if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
|
| 237 |
+
tokenizer.raise_syntax_error(
|
| 238 |
+
"Local version label can only be used with `==` or `!=` operators",
|
| 239 |
+
span_start=span_start,
|
| 240 |
+
span_end=tokenizer.position,
|
| 241 |
+
)
|
| 242 |
+
tokenizer.consume("WS")
|
| 243 |
+
if not tokenizer.check("COMMA"):
|
| 244 |
+
break
|
| 245 |
+
parsed_specifiers += tokenizer.read().text
|
| 246 |
+
tokenizer.consume("WS")
|
| 247 |
+
|
| 248 |
+
return parsed_specifiers
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
# --------------------------------------------------------------------------------------
|
| 252 |
+
# Recursive descent parser for marker expression
|
| 253 |
+
# --------------------------------------------------------------------------------------
|
| 254 |
+
def parse_marker(source: str) -> MarkerList:
|
| 255 |
+
return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
|
| 259 |
+
retval = _parse_marker(tokenizer)
|
| 260 |
+
tokenizer.expect("END", expected="end of marker expression")
|
| 261 |
+
return retval
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
|
| 265 |
+
"""
|
| 266 |
+
marker = marker_atom (BOOLOP marker_atom)+
|
| 267 |
+
"""
|
| 268 |
+
expression = [_parse_marker_atom(tokenizer)]
|
| 269 |
+
while tokenizer.check("BOOLOP"):
|
| 270 |
+
token = tokenizer.read()
|
| 271 |
+
expr_right = _parse_marker_atom(tokenizer)
|
| 272 |
+
expression.extend((token.text, expr_right))
|
| 273 |
+
return expression
|
| 274 |
+
|
| 275 |
+
|
| 276 |
+
def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
|
| 277 |
+
"""
|
| 278 |
+
marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
|
| 279 |
+
| WS? marker_item WS?
|
| 280 |
+
"""
|
| 281 |
+
|
| 282 |
+
tokenizer.consume("WS")
|
| 283 |
+
if tokenizer.check("LEFT_PARENTHESIS", peek=True):
|
| 284 |
+
with tokenizer.enclosing_tokens(
|
| 285 |
+
"LEFT_PARENTHESIS",
|
| 286 |
+
"RIGHT_PARENTHESIS",
|
| 287 |
+
around="marker expression",
|
| 288 |
+
):
|
| 289 |
+
tokenizer.consume("WS")
|
| 290 |
+
marker: MarkerAtom = _parse_marker(tokenizer)
|
| 291 |
+
tokenizer.consume("WS")
|
| 292 |
+
else:
|
| 293 |
+
marker = _parse_marker_item(tokenizer)
|
| 294 |
+
tokenizer.consume("WS")
|
| 295 |
+
return marker
|
| 296 |
+
|
| 297 |
+
|
| 298 |
+
def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
|
| 299 |
+
"""
|
| 300 |
+
marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
|
| 301 |
+
"""
|
| 302 |
+
tokenizer.consume("WS")
|
| 303 |
+
marker_var_left = _parse_marker_var(tokenizer)
|
| 304 |
+
tokenizer.consume("WS")
|
| 305 |
+
marker_op = _parse_marker_op(tokenizer)
|
| 306 |
+
tokenizer.consume("WS")
|
| 307 |
+
marker_var_right = _parse_marker_var(tokenizer)
|
| 308 |
+
tokenizer.consume("WS")
|
| 309 |
+
return (marker_var_left, marker_op, marker_var_right)
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
|
| 313 |
+
"""
|
| 314 |
+
marker_var = VARIABLE | QUOTED_STRING
|
| 315 |
+
"""
|
| 316 |
+
if tokenizer.check("VARIABLE"):
|
| 317 |
+
return process_env_var(tokenizer.read().text.replace(".", "_"))
|
| 318 |
+
elif tokenizer.check("QUOTED_STRING"):
|
| 319 |
+
return process_python_str(tokenizer.read().text)
|
| 320 |
+
else:
|
| 321 |
+
tokenizer.raise_syntax_error(
|
| 322 |
+
message="Expected a marker variable or quoted string"
|
| 323 |
+
)
|
| 324 |
+
|
| 325 |
+
|
| 326 |
+
def process_env_var(env_var: str) -> Variable:
|
| 327 |
+
if env_var in ("platform_python_implementation", "python_implementation"):
|
| 328 |
+
return Variable("platform_python_implementation")
|
| 329 |
+
else:
|
| 330 |
+
return Variable(env_var)
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
def process_python_str(python_str: str) -> Value:
|
| 334 |
+
value = ast.literal_eval(python_str)
|
| 335 |
+
return Value(str(value))
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
def _parse_marker_op(tokenizer: Tokenizer) -> Op:
|
| 339 |
+
"""
|
| 340 |
+
marker_op = IN | NOT IN | OP
|
| 341 |
+
"""
|
| 342 |
+
if tokenizer.check("IN"):
|
| 343 |
+
tokenizer.read()
|
| 344 |
+
return Op("in")
|
| 345 |
+
elif tokenizer.check("NOT"):
|
| 346 |
+
tokenizer.read()
|
| 347 |
+
tokenizer.expect("WS", expected="whitespace after 'not'")
|
| 348 |
+
tokenizer.expect("IN", expected="'in' after 'not'")
|
| 349 |
+
return Op("not in")
|
| 350 |
+
elif tokenizer.check("OP"):
|
| 351 |
+
return Op(tokenizer.read().text)
|
| 352 |
+
else:
|
| 353 |
+
return tokenizer.raise_syntax_error(
|
| 354 |
+
"Expected marker operator, one of "
|
| 355 |
+
"<=, <, !=, ==, >=, >, ~=, ===, in, not in"
|
| 356 |
+
)
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_structures.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class InfinityType:
|
| 7 |
+
def __repr__(self) -> str:
|
| 8 |
+
return "Infinity"
|
| 9 |
+
|
| 10 |
+
def __hash__(self) -> int:
|
| 11 |
+
return hash(repr(self))
|
| 12 |
+
|
| 13 |
+
def __lt__(self, other: object) -> bool:
|
| 14 |
+
return False
|
| 15 |
+
|
| 16 |
+
def __le__(self, other: object) -> bool:
|
| 17 |
+
return False
|
| 18 |
+
|
| 19 |
+
def __eq__(self, other: object) -> bool:
|
| 20 |
+
return isinstance(other, self.__class__)
|
| 21 |
+
|
| 22 |
+
def __gt__(self, other: object) -> bool:
|
| 23 |
+
return True
|
| 24 |
+
|
| 25 |
+
def __ge__(self, other: object) -> bool:
|
| 26 |
+
return True
|
| 27 |
+
|
| 28 |
+
def __neg__(self: object) -> "NegativeInfinityType":
|
| 29 |
+
return NegativeInfinity
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
Infinity = InfinityType()
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class NegativeInfinityType:
|
| 36 |
+
def __repr__(self) -> str:
|
| 37 |
+
return "-Infinity"
|
| 38 |
+
|
| 39 |
+
def __hash__(self) -> int:
|
| 40 |
+
return hash(repr(self))
|
| 41 |
+
|
| 42 |
+
def __lt__(self, other: object) -> bool:
|
| 43 |
+
return True
|
| 44 |
+
|
| 45 |
+
def __le__(self, other: object) -> bool:
|
| 46 |
+
return True
|
| 47 |
+
|
| 48 |
+
def __eq__(self, other: object) -> bool:
|
| 49 |
+
return isinstance(other, self.__class__)
|
| 50 |
+
|
| 51 |
+
def __gt__(self, other: object) -> bool:
|
| 52 |
+
return False
|
| 53 |
+
|
| 54 |
+
def __ge__(self, other: object) -> bool:
|
| 55 |
+
return False
|
| 56 |
+
|
| 57 |
+
def __neg__(self: object) -> InfinityType:
|
| 58 |
+
return Infinity
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
NegativeInfinity = NegativeInfinityType()
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/_tokenizer.py
ADDED
|
@@ -0,0 +1,192 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import re
|
| 3 |
+
from dataclasses import dataclass
|
| 4 |
+
from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union
|
| 5 |
+
|
| 6 |
+
from .specifiers import Specifier
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@dataclass
|
| 10 |
+
class Token:
|
| 11 |
+
name: str
|
| 12 |
+
text: str
|
| 13 |
+
position: int
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class ParserSyntaxError(Exception):
|
| 17 |
+
"""The provided source text could not be parsed correctly."""
|
| 18 |
+
|
| 19 |
+
def __init__(
|
| 20 |
+
self,
|
| 21 |
+
message: str,
|
| 22 |
+
*,
|
| 23 |
+
source: str,
|
| 24 |
+
span: Tuple[int, int],
|
| 25 |
+
) -> None:
|
| 26 |
+
self.span = span
|
| 27 |
+
self.message = message
|
| 28 |
+
self.source = source
|
| 29 |
+
|
| 30 |
+
super().__init__()
|
| 31 |
+
|
| 32 |
+
def __str__(self) -> str:
|
| 33 |
+
marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^"
|
| 34 |
+
return "\n ".join([self.message, self.source, marker])
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
|
| 38 |
+
"LEFT_PARENTHESIS": r"\(",
|
| 39 |
+
"RIGHT_PARENTHESIS": r"\)",
|
| 40 |
+
"LEFT_BRACKET": r"\[",
|
| 41 |
+
"RIGHT_BRACKET": r"\]",
|
| 42 |
+
"SEMICOLON": r";",
|
| 43 |
+
"COMMA": r",",
|
| 44 |
+
"QUOTED_STRING": re.compile(
|
| 45 |
+
r"""
|
| 46 |
+
(
|
| 47 |
+
('[^']*')
|
| 48 |
+
|
|
| 49 |
+
("[^"]*")
|
| 50 |
+
)
|
| 51 |
+
""",
|
| 52 |
+
re.VERBOSE,
|
| 53 |
+
),
|
| 54 |
+
"OP": r"(===|==|~=|!=|<=|>=|<|>)",
|
| 55 |
+
"BOOLOP": r"\b(or|and)\b",
|
| 56 |
+
"IN": r"\bin\b",
|
| 57 |
+
"NOT": r"\bnot\b",
|
| 58 |
+
"VARIABLE": re.compile(
|
| 59 |
+
r"""
|
| 60 |
+
\b(
|
| 61 |
+
python_version
|
| 62 |
+
|python_full_version
|
| 63 |
+
|os[._]name
|
| 64 |
+
|sys[._]platform
|
| 65 |
+
|platform_(release|system)
|
| 66 |
+
|platform[._](version|machine|python_implementation)
|
| 67 |
+
|python_implementation
|
| 68 |
+
|implementation_(name|version)
|
| 69 |
+
|extra
|
| 70 |
+
)\b
|
| 71 |
+
""",
|
| 72 |
+
re.VERBOSE,
|
| 73 |
+
),
|
| 74 |
+
"SPECIFIER": re.compile(
|
| 75 |
+
Specifier._operator_regex_str + Specifier._version_regex_str,
|
| 76 |
+
re.VERBOSE | re.IGNORECASE,
|
| 77 |
+
),
|
| 78 |
+
"AT": r"\@",
|
| 79 |
+
"URL": r"[^ \t]+",
|
| 80 |
+
"IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
|
| 81 |
+
"VERSION_PREFIX_TRAIL": r"\.\*",
|
| 82 |
+
"VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
|
| 83 |
+
"WS": r"[ \t]+",
|
| 84 |
+
"END": r"$",
|
| 85 |
+
}
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class Tokenizer:
|
| 89 |
+
"""Context-sensitive token parsing.
|
| 90 |
+
|
| 91 |
+
Provides methods to examine the input stream to check whether the next token
|
| 92 |
+
matches.
|
| 93 |
+
"""
|
| 94 |
+
|
| 95 |
+
def __init__(
|
| 96 |
+
self,
|
| 97 |
+
source: str,
|
| 98 |
+
*,
|
| 99 |
+
rules: "Dict[str, Union[str, re.Pattern[str]]]",
|
| 100 |
+
) -> None:
|
| 101 |
+
self.source = source
|
| 102 |
+
self.rules: Dict[str, re.Pattern[str]] = {
|
| 103 |
+
name: re.compile(pattern) for name, pattern in rules.items()
|
| 104 |
+
}
|
| 105 |
+
self.next_token: Optional[Token] = None
|
| 106 |
+
self.position = 0
|
| 107 |
+
|
| 108 |
+
def consume(self, name: str) -> None:
|
| 109 |
+
"""Move beyond provided token name, if at current position."""
|
| 110 |
+
if self.check(name):
|
| 111 |
+
self.read()
|
| 112 |
+
|
| 113 |
+
def check(self, name: str, *, peek: bool = False) -> bool:
|
| 114 |
+
"""Check whether the next token has the provided name.
|
| 115 |
+
|
| 116 |
+
By default, if the check succeeds, the token *must* be read before
|
| 117 |
+
another check. If `peek` is set to `True`, the token is not loaded and
|
| 118 |
+
would need to be checked again.
|
| 119 |
+
"""
|
| 120 |
+
assert (
|
| 121 |
+
self.next_token is None
|
| 122 |
+
), f"Cannot check for {name!r}, already have {self.next_token!r}"
|
| 123 |
+
assert name in self.rules, f"Unknown token name: {name!r}"
|
| 124 |
+
|
| 125 |
+
expression = self.rules[name]
|
| 126 |
+
|
| 127 |
+
match = expression.match(self.source, self.position)
|
| 128 |
+
if match is None:
|
| 129 |
+
return False
|
| 130 |
+
if not peek:
|
| 131 |
+
self.next_token = Token(name, match[0], self.position)
|
| 132 |
+
return True
|
| 133 |
+
|
| 134 |
+
def expect(self, name: str, *, expected: str) -> Token:
|
| 135 |
+
"""Expect a certain token name next, failing with a syntax error otherwise.
|
| 136 |
+
|
| 137 |
+
The token is *not* read.
|
| 138 |
+
"""
|
| 139 |
+
if not self.check(name):
|
| 140 |
+
raise self.raise_syntax_error(f"Expected {expected}")
|
| 141 |
+
return self.read()
|
| 142 |
+
|
| 143 |
+
def read(self) -> Token:
|
| 144 |
+
"""Consume the next token and return it."""
|
| 145 |
+
token = self.next_token
|
| 146 |
+
assert token is not None
|
| 147 |
+
|
| 148 |
+
self.position += len(token.text)
|
| 149 |
+
self.next_token = None
|
| 150 |
+
|
| 151 |
+
return token
|
| 152 |
+
|
| 153 |
+
def raise_syntax_error(
|
| 154 |
+
self,
|
| 155 |
+
message: str,
|
| 156 |
+
*,
|
| 157 |
+
span_start: Optional[int] = None,
|
| 158 |
+
span_end: Optional[int] = None,
|
| 159 |
+
) -> NoReturn:
|
| 160 |
+
"""Raise ParserSyntaxError at the given position."""
|
| 161 |
+
span = (
|
| 162 |
+
self.position if span_start is None else span_start,
|
| 163 |
+
self.position if span_end is None else span_end,
|
| 164 |
+
)
|
| 165 |
+
raise ParserSyntaxError(
|
| 166 |
+
message,
|
| 167 |
+
source=self.source,
|
| 168 |
+
span=span,
|
| 169 |
+
)
|
| 170 |
+
|
| 171 |
+
@contextlib.contextmanager
|
| 172 |
+
def enclosing_tokens(
|
| 173 |
+
self, open_token: str, close_token: str, *, around: str
|
| 174 |
+
) -> Iterator[None]:
|
| 175 |
+
if self.check(open_token):
|
| 176 |
+
open_position = self.position
|
| 177 |
+
self.read()
|
| 178 |
+
else:
|
| 179 |
+
open_position = None
|
| 180 |
+
|
| 181 |
+
yield
|
| 182 |
+
|
| 183 |
+
if open_position is None:
|
| 184 |
+
return
|
| 185 |
+
|
| 186 |
+
if not self.check(close_token):
|
| 187 |
+
self.raise_syntax_error(
|
| 188 |
+
f"Expected matching {close_token} for {open_token}, after {around}",
|
| 189 |
+
span_start=open_position,
|
| 190 |
+
)
|
| 191 |
+
|
| 192 |
+
self.read()
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/markers.py
ADDED
|
@@ -0,0 +1,253 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
import operator
|
| 6 |
+
import os
|
| 7 |
+
import platform
|
| 8 |
+
import sys
|
| 9 |
+
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
|
| 10 |
+
|
| 11 |
+
from ._parser import (
|
| 12 |
+
MarkerAtom,
|
| 13 |
+
MarkerList,
|
| 14 |
+
Op,
|
| 15 |
+
Value,
|
| 16 |
+
Variable,
|
| 17 |
+
)
|
| 18 |
+
from ._parser import (
|
| 19 |
+
parse_marker as _parse_marker,
|
| 20 |
+
)
|
| 21 |
+
from ._tokenizer import ParserSyntaxError
|
| 22 |
+
from .specifiers import InvalidSpecifier, Specifier
|
| 23 |
+
from .utils import canonicalize_name
|
| 24 |
+
|
| 25 |
+
__all__ = [
|
| 26 |
+
"InvalidMarker",
|
| 27 |
+
"UndefinedComparison",
|
| 28 |
+
"UndefinedEnvironmentName",
|
| 29 |
+
"Marker",
|
| 30 |
+
"default_environment",
|
| 31 |
+
]
|
| 32 |
+
|
| 33 |
+
Operator = Callable[[str, str], bool]
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class InvalidMarker(ValueError):
|
| 37 |
+
"""
|
| 38 |
+
An invalid marker was found, users should refer to PEP 508.
|
| 39 |
+
"""
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class UndefinedComparison(ValueError):
|
| 43 |
+
"""
|
| 44 |
+
An invalid operation was attempted on a value that doesn't support it.
|
| 45 |
+
"""
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class UndefinedEnvironmentName(ValueError):
|
| 49 |
+
"""
|
| 50 |
+
A name was attempted to be used that does not exist inside of the
|
| 51 |
+
environment.
|
| 52 |
+
"""
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def _normalize_extra_values(results: Any) -> Any:
|
| 56 |
+
"""
|
| 57 |
+
Normalize extra values.
|
| 58 |
+
"""
|
| 59 |
+
if isinstance(results[0], tuple):
|
| 60 |
+
lhs, op, rhs = results[0]
|
| 61 |
+
if isinstance(lhs, Variable) and lhs.value == "extra":
|
| 62 |
+
normalized_extra = canonicalize_name(rhs.value)
|
| 63 |
+
rhs = Value(normalized_extra)
|
| 64 |
+
elif isinstance(rhs, Variable) and rhs.value == "extra":
|
| 65 |
+
normalized_extra = canonicalize_name(lhs.value)
|
| 66 |
+
lhs = Value(normalized_extra)
|
| 67 |
+
results[0] = lhs, op, rhs
|
| 68 |
+
return results
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def _format_marker(
|
| 72 |
+
marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
|
| 73 |
+
) -> str:
|
| 74 |
+
assert isinstance(marker, (list, tuple, str))
|
| 75 |
+
|
| 76 |
+
# Sometimes we have a structure like [[...]] which is a single item list
|
| 77 |
+
# where the single item is itself it's own list. In that case we want skip
|
| 78 |
+
# the rest of this function so that we don't get extraneous () on the
|
| 79 |
+
# outside.
|
| 80 |
+
if (
|
| 81 |
+
isinstance(marker, list)
|
| 82 |
+
and len(marker) == 1
|
| 83 |
+
and isinstance(marker[0], (list, tuple))
|
| 84 |
+
):
|
| 85 |
+
return _format_marker(marker[0])
|
| 86 |
+
|
| 87 |
+
if isinstance(marker, list):
|
| 88 |
+
inner = (_format_marker(m, first=False) for m in marker)
|
| 89 |
+
if first:
|
| 90 |
+
return " ".join(inner)
|
| 91 |
+
else:
|
| 92 |
+
return "(" + " ".join(inner) + ")"
|
| 93 |
+
elif isinstance(marker, tuple):
|
| 94 |
+
return " ".join([m.serialize() for m in marker])
|
| 95 |
+
else:
|
| 96 |
+
return marker
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
_operators: Dict[str, Operator] = {
|
| 100 |
+
"in": lambda lhs, rhs: lhs in rhs,
|
| 101 |
+
"not in": lambda lhs, rhs: lhs not in rhs,
|
| 102 |
+
"<": operator.lt,
|
| 103 |
+
"<=": operator.le,
|
| 104 |
+
"==": operator.eq,
|
| 105 |
+
"!=": operator.ne,
|
| 106 |
+
">=": operator.ge,
|
| 107 |
+
">": operator.gt,
|
| 108 |
+
}
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
|
| 112 |
+
try:
|
| 113 |
+
spec = Specifier("".join([op.serialize(), rhs]))
|
| 114 |
+
except InvalidSpecifier:
|
| 115 |
+
pass
|
| 116 |
+
else:
|
| 117 |
+
return spec.contains(lhs, prereleases=True)
|
| 118 |
+
|
| 119 |
+
oper: Optional[Operator] = _operators.get(op.serialize())
|
| 120 |
+
if oper is None:
|
| 121 |
+
raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
|
| 122 |
+
|
| 123 |
+
return oper(lhs, rhs)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def _normalize(*values: str, key: str) -> Tuple[str, ...]:
|
| 127 |
+
# PEP 685 – Comparison of extra names for optional distribution dependencies
|
| 128 |
+
# https://peps.python.org/pep-0685/
|
| 129 |
+
# > When comparing extra names, tools MUST normalize the names being
|
| 130 |
+
# > compared using the semantics outlined in PEP 503 for names
|
| 131 |
+
if key == "extra":
|
| 132 |
+
return tuple(canonicalize_name(v) for v in values)
|
| 133 |
+
|
| 134 |
+
# other environment markers don't have such standards
|
| 135 |
+
return values
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
|
| 139 |
+
groups: List[List[bool]] = [[]]
|
| 140 |
+
|
| 141 |
+
for marker in markers:
|
| 142 |
+
assert isinstance(marker, (list, tuple, str))
|
| 143 |
+
|
| 144 |
+
if isinstance(marker, list):
|
| 145 |
+
groups[-1].append(_evaluate_markers(marker, environment))
|
| 146 |
+
elif isinstance(marker, tuple):
|
| 147 |
+
lhs, op, rhs = marker
|
| 148 |
+
|
| 149 |
+
if isinstance(lhs, Variable):
|
| 150 |
+
environment_key = lhs.value
|
| 151 |
+
lhs_value = environment[environment_key]
|
| 152 |
+
rhs_value = rhs.value
|
| 153 |
+
else:
|
| 154 |
+
lhs_value = lhs.value
|
| 155 |
+
environment_key = rhs.value
|
| 156 |
+
rhs_value = environment[environment_key]
|
| 157 |
+
|
| 158 |
+
lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
|
| 159 |
+
groups[-1].append(_eval_op(lhs_value, op, rhs_value))
|
| 160 |
+
else:
|
| 161 |
+
assert marker in ["and", "or"]
|
| 162 |
+
if marker == "or":
|
| 163 |
+
groups.append([])
|
| 164 |
+
|
| 165 |
+
return any(all(item) for item in groups)
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
def format_full_version(info: "sys._version_info") -> str:
|
| 169 |
+
version = "{0.major}.{0.minor}.{0.micro}".format(info)
|
| 170 |
+
kind = info.releaselevel
|
| 171 |
+
if kind != "final":
|
| 172 |
+
version += kind[0] + str(info.serial)
|
| 173 |
+
return version
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
def default_environment() -> Dict[str, str]:
|
| 177 |
+
iver = format_full_version(sys.implementation.version)
|
| 178 |
+
implementation_name = sys.implementation.name
|
| 179 |
+
return {
|
| 180 |
+
"implementation_name": implementation_name,
|
| 181 |
+
"implementation_version": iver,
|
| 182 |
+
"os_name": os.name,
|
| 183 |
+
"platform_machine": platform.machine(),
|
| 184 |
+
"platform_release": platform.release(),
|
| 185 |
+
"platform_system": platform.system(),
|
| 186 |
+
"platform_version": platform.version(),
|
| 187 |
+
"python_full_version": platform.python_version(),
|
| 188 |
+
"platform_python_implementation": platform.python_implementation(),
|
| 189 |
+
"python_version": ".".join(platform.python_version_tuple()[:2]),
|
| 190 |
+
"sys_platform": sys.platform,
|
| 191 |
+
}
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
class Marker:
    def __init__(self, marker: str) -> None:
        """Parse *marker* (a PEP 508 environment marker string).

        :raises InvalidMarker: If the string cannot be parsed.
        """
        # Note: We create a Marker object without calling this constructor in
        # packaging.requirements.Requirement. If any additional logic is
        # added here, make sure to mirror/adapt Requirement.
        try:
            # The attribute `_markers` can be described in terms of a recursive type:
            #   MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
            #
            # For example, the following expression:
            #   python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
            #
            # is parsed into:
            #   [
            #       (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
            #       'and',
            #       [
            #           (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
            #           'or',
            #           (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
            #       ]
            #   ]
            self._markers = _normalize_extra_values(_parse_marker(marker))
        except ParserSyntaxError as exc:
            raise InvalidMarker(str(exc)) from exc

    def __str__(self) -> str:
        return _format_marker(self._markers)

    def __repr__(self) -> str:
        return f"<Marker('{self}')>"

    def __hash__(self) -> int:
        # Hash on the normalized string form so equal markers hash equally.
        return hash((self.__class__.__name__, str(self)))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Marker):
            return NotImplemented
        # Equality is defined on the canonical string representation.
        return str(self) == str(other)

    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        env = default_environment()
        env["extra"] = ""
        if environment is not None:
            env.update(environment)
            # The API used to allow setting extra to None. We need to handle
            # this case for backwards compatibility.
            if env["extra"] is None:
                env["extra"] = ""

        return _evaluate_markers(self._markers, env)
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/requirements.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
from typing import Any, Iterator, Optional, Set
|
| 6 |
+
|
| 7 |
+
from ._parser import parse_requirement as _parse_requirement
|
| 8 |
+
from ._tokenizer import ParserSyntaxError
|
| 9 |
+
from .markers import Marker, _normalize_extra_values
|
| 10 |
+
from .specifiers import SpecifierSet
|
| 11 |
+
from .utils import canonicalize_name
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class InvalidRequirement(ValueError):
    """Raised when a badly-formed requirement string is parsed.

    Users should refer to PEP 508 for the valid requirement syntax.
    """
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class Requirement:
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string: str) -> None:
        try:
            req = _parse_requirement(requirement_string)
        except ParserSyntaxError as exc:
            raise InvalidRequirement(str(exc)) from exc

        self.name: str = req.name
        self.url: Optional[str] = req.url or None
        self.extras: Set[str] = set(req.extras or [])
        self.specifier: SpecifierSet = SpecifierSet(req.specifier)
        self.marker: Optional[Marker] = None
        if req.marker is not None:
            # Build the Marker without re-parsing the already-parsed marker:
            # bypass Marker.__init__ and set its internal state directly.
            self.marker = Marker.__new__(Marker)
            self.marker._markers = _normalize_extra_values(req.marker)

    def _iter_parts(self, name: str) -> Iterator[str]:
        """Yield the string fragments that make up this requirement."""
        yield name

        if self.extras:
            yield "[{}]".format(",".join(sorted(self.extras)))

        if self.specifier:
            yield str(self.specifier)

        if self.url:
            yield f"@ {self.url}"
            if self.marker:
                # A separating space is required between the URL and the
                # marker delimiter.
                yield " "

        if self.marker:
            yield f"; {self.marker}"

    def __str__(self) -> str:
        return "".join(self._iter_parts(self.name))

    def __repr__(self) -> str:
        return f"<Requirement('{self}')>"

    def __hash__(self) -> int:
        # Hash on the canonicalized-name form so equal requirements hash
        # equally even when their names differ only in normalization.
        parts = (
            self.__class__.__name__,
            *self._iter_parts(canonicalize_name(self.name)),
        )
        return hash(parts)

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Requirement):
            return NotImplemented

        return (
            canonicalize_name(self.name) == canonicalize_name(other.name)
            and self.extras == other.extras
            and self.specifier == other.specifier
            and self.url == other.url
            and self.marker == other.marker
        )
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/specifiers.py
ADDED
|
@@ -0,0 +1,1011 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
"""
|
| 5 |
+
.. testsetup::
|
| 6 |
+
|
| 7 |
+
from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
|
| 8 |
+
from packaging.version import Version
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import abc
|
| 12 |
+
import itertools
|
| 13 |
+
import re
|
| 14 |
+
from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
|
| 15 |
+
|
| 16 |
+
from .utils import canonicalize_version
|
| 17 |
+
from .version import Version
|
| 18 |
+
|
| 19 |
+
# A version given either pre-parsed or in its raw string form.
UnparsedVersion = Union[Version, str]
# TypeVar bound to UnparsedVersion so filter() preserves the caller's type.
UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
# Signature of the _compare_* operator methods: (parsed version, spec string) -> bool.
CallableOperator = Callable[[Version, str], bool]
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def _coerce_version(version: UnparsedVersion) -> Version:
    """Return *version* as a :class:`Version`, parsing it when given a string."""
    return version if isinstance(version, Version) else Version(version)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class InvalidSpecifier(ValueError):
    """Raised when a :class:`Specifier` is created from an invalid string.

    >>> Specifier("lolwat")
    Traceback (most recent call last):
        ...
    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
    """
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class BaseSpecifier(metaclass=abc.ABCMeta):
    # Abstract interface implemented by both Specifier and SpecifierSet below.
    # NOTE(review): uses `metaclass=abc.ABCMeta` rather than `abc.ABC` —
    # keep it that way so `__bases__` is unchanged for any introspecting caller.

    @abc.abstractmethod
    def __str__(self) -> str:
        """
        Returns the str representation of this Specifier-like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self) -> int:
        """
        Returns a hash value for this Specifier-like object.
        """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Returns a boolean representing whether or not the two Specifier-like
        objects are equal.

        :param other: The other object to check against.
        """

    @property
    @abc.abstractmethod
    def prereleases(self) -> Optional[bool]:
        """Whether or not pre-releases as a whole are allowed.

        This can be set to either ``True`` or ``False`` to explicitly enable or disable
        prereleases or it can be set to ``None`` (the default) to use default semantics.
        """

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        """Setter for :attr:`prereleases`.

        :param value: The value to set.
        """

    @abc.abstractmethod
    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
    ) -> Iterator[UnparsedVersionVar]:
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
class Specifier(BaseSpecifier):
|
| 98 |
+
"""This class abstracts handling of version specifiers.
|
| 99 |
+
|
| 100 |
+
.. tip::
|
| 101 |
+
|
| 102 |
+
It is generally not required to instantiate this manually. You should instead
|
| 103 |
+
prefer to work with :class:`SpecifierSet` instead, which can parse
|
| 104 |
+
comma-separated version specifiers (which is what package metadata contains).
|
| 105 |
+
"""
|
| 106 |
+
|
| 107 |
+
_operator_regex_str = r"""
|
| 108 |
+
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
|
| 109 |
+
"""
|
| 110 |
+
_version_regex_str = r"""
|
| 111 |
+
(?P<version>
|
| 112 |
+
(?:
|
| 113 |
+
# The identity operators allow for an escape hatch that will
|
| 114 |
+
# do an exact string match of the version you wish to install.
|
| 115 |
+
# This will not be parsed by PEP 440 and we cannot determine
|
| 116 |
+
# any semantic meaning from it. This operator is discouraged
|
| 117 |
+
# but included entirely as an escape hatch.
|
| 118 |
+
(?<====) # Only match for the identity operator
|
| 119 |
+
\s*
|
| 120 |
+
[^\s;)]* # The arbitrary version can be just about anything,
|
| 121 |
+
# we match everything except for whitespace, a
|
| 122 |
+
# semi-colon for marker support, and a closing paren
|
| 123 |
+
# since versions can be enclosed in them.
|
| 124 |
+
)
|
| 125 |
+
|
|
| 126 |
+
(?:
|
| 127 |
+
# The (non)equality operators allow for wild card and local
|
| 128 |
+
# versions to be specified so we have to define these two
|
| 129 |
+
# operators separately to enable that.
|
| 130 |
+
(?<===|!=) # Only match for equals and not equals
|
| 131 |
+
|
| 132 |
+
\s*
|
| 133 |
+
v?
|
| 134 |
+
(?:[0-9]+!)? # epoch
|
| 135 |
+
[0-9]+(?:\.[0-9]+)* # release
|
| 136 |
+
|
| 137 |
+
# You cannot use a wild card and a pre-release, post-release, a dev or
|
| 138 |
+
# local version together so group them with a | and make them optional.
|
| 139 |
+
(?:
|
| 140 |
+
\.\* # Wild card syntax of .*
|
| 141 |
+
|
|
| 142 |
+
(?: # pre release
|
| 143 |
+
[-_\.]?
|
| 144 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 145 |
+
[-_\.]?
|
| 146 |
+
[0-9]*
|
| 147 |
+
)?
|
| 148 |
+
(?: # post release
|
| 149 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 150 |
+
)?
|
| 151 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 152 |
+
(?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
|
| 153 |
+
)?
|
| 154 |
+
)
|
| 155 |
+
|
|
| 156 |
+
(?:
|
| 157 |
+
# The compatible operator requires at least two digits in the
|
| 158 |
+
# release segment.
|
| 159 |
+
(?<=~=) # Only match for the compatible operator
|
| 160 |
+
|
| 161 |
+
\s*
|
| 162 |
+
v?
|
| 163 |
+
(?:[0-9]+!)? # epoch
|
| 164 |
+
[0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
|
| 165 |
+
(?: # pre release
|
| 166 |
+
[-_\.]?
|
| 167 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 168 |
+
[-_\.]?
|
| 169 |
+
[0-9]*
|
| 170 |
+
)?
|
| 171 |
+
(?: # post release
|
| 172 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 173 |
+
)?
|
| 174 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 175 |
+
)
|
| 176 |
+
|
|
| 177 |
+
(?:
|
| 178 |
+
# All other operators only allow a sub set of what the
|
| 179 |
+
# (non)equality operators do. Specifically they do not allow
|
| 180 |
+
# local versions to be specified nor do they allow the prefix
|
| 181 |
+
# matching wild cards.
|
| 182 |
+
(?<!==|!=|~=) # We have special cases for these
|
| 183 |
+
# operators so we want to make sure they
|
| 184 |
+
# don't match here.
|
| 185 |
+
|
| 186 |
+
\s*
|
| 187 |
+
v?
|
| 188 |
+
(?:[0-9]+!)? # epoch
|
| 189 |
+
[0-9]+(?:\.[0-9]+)* # release
|
| 190 |
+
(?: # pre release
|
| 191 |
+
[-_\.]?
|
| 192 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 193 |
+
[-_\.]?
|
| 194 |
+
[0-9]*
|
| 195 |
+
)?
|
| 196 |
+
(?: # post release
|
| 197 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 198 |
+
)?
|
| 199 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 200 |
+
)
|
| 201 |
+
)
|
| 202 |
+
"""
|
| 203 |
+
|
| 204 |
+
_regex = re.compile(
|
| 205 |
+
r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
|
| 206 |
+
re.VERBOSE | re.IGNORECASE,
|
| 207 |
+
)
|
| 208 |
+
|
| 209 |
+
_operators = {
|
| 210 |
+
"~=": "compatible",
|
| 211 |
+
"==": "equal",
|
| 212 |
+
"!=": "not_equal",
|
| 213 |
+
"<=": "less_than_equal",
|
| 214 |
+
">=": "greater_than_equal",
|
| 215 |
+
"<": "less_than",
|
| 216 |
+
">": "greater_than",
|
| 217 |
+
"===": "arbitrary",
|
| 218 |
+
}
|
| 219 |
+
|
| 220 |
+
def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
|
| 221 |
+
"""Initialize a Specifier instance.
|
| 222 |
+
|
| 223 |
+
:param spec:
|
| 224 |
+
The string representation of a specifier which will be parsed and
|
| 225 |
+
normalized before use.
|
| 226 |
+
:param prereleases:
|
| 227 |
+
This tells the specifier if it should accept prerelease versions if
|
| 228 |
+
applicable or not. The default of ``None`` will autodetect it from the
|
| 229 |
+
given specifiers.
|
| 230 |
+
:raises InvalidSpecifier:
|
| 231 |
+
If the given specifier is invalid (i.e. bad syntax).
|
| 232 |
+
"""
|
| 233 |
+
match = self._regex.search(spec)
|
| 234 |
+
if not match:
|
| 235 |
+
raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
|
| 236 |
+
|
| 237 |
+
self._spec: Tuple[str, str] = (
|
| 238 |
+
match.group("operator").strip(),
|
| 239 |
+
match.group("version").strip(),
|
| 240 |
+
)
|
| 241 |
+
|
| 242 |
+
# Store whether or not this Specifier should accept prereleases
|
| 243 |
+
self._prereleases = prereleases
|
| 244 |
+
|
| 245 |
+
# https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
|
| 246 |
+
@property # type: ignore[override]
|
| 247 |
+
def prereleases(self) -> bool:
|
| 248 |
+
# If there is an explicit prereleases set for this, then we'll just
|
| 249 |
+
# blindly use that.
|
| 250 |
+
if self._prereleases is not None:
|
| 251 |
+
return self._prereleases
|
| 252 |
+
|
| 253 |
+
# Look at all of our specifiers and determine if they are inclusive
|
| 254 |
+
# operators, and if they are if they are including an explicit
|
| 255 |
+
# prerelease.
|
| 256 |
+
operator, version = self._spec
|
| 257 |
+
if operator in ["==", ">=", "<=", "~=", "==="]:
|
| 258 |
+
# The == specifier can include a trailing .*, if it does we
|
| 259 |
+
# want to remove before parsing.
|
| 260 |
+
if operator == "==" and version.endswith(".*"):
|
| 261 |
+
version = version[:-2]
|
| 262 |
+
|
| 263 |
+
# Parse the version, and if it is a pre-release than this
|
| 264 |
+
# specifier allows pre-releases.
|
| 265 |
+
if Version(version).is_prerelease:
|
| 266 |
+
return True
|
| 267 |
+
|
| 268 |
+
return False
|
| 269 |
+
|
| 270 |
+
@prereleases.setter
|
| 271 |
+
def prereleases(self, value: bool) -> None:
|
| 272 |
+
self._prereleases = value
|
| 273 |
+
|
| 274 |
+
@property
|
| 275 |
+
def operator(self) -> str:
|
| 276 |
+
"""The operator of this specifier.
|
| 277 |
+
|
| 278 |
+
>>> Specifier("==1.2.3").operator
|
| 279 |
+
'=='
|
| 280 |
+
"""
|
| 281 |
+
return self._spec[0]
|
| 282 |
+
|
| 283 |
+
@property
|
| 284 |
+
def version(self) -> str:
|
| 285 |
+
"""The version of this specifier.
|
| 286 |
+
|
| 287 |
+
>>> Specifier("==1.2.3").version
|
| 288 |
+
'1.2.3'
|
| 289 |
+
"""
|
| 290 |
+
return self._spec[1]
|
| 291 |
+
|
| 292 |
+
def __repr__(self) -> str:
|
| 293 |
+
"""A representation of the Specifier that shows all internal state.
|
| 294 |
+
|
| 295 |
+
>>> Specifier('>=1.0.0')
|
| 296 |
+
<Specifier('>=1.0.0')>
|
| 297 |
+
>>> Specifier('>=1.0.0', prereleases=False)
|
| 298 |
+
<Specifier('>=1.0.0', prereleases=False)>
|
| 299 |
+
>>> Specifier('>=1.0.0', prereleases=True)
|
| 300 |
+
<Specifier('>=1.0.0', prereleases=True)>
|
| 301 |
+
"""
|
| 302 |
+
pre = (
|
| 303 |
+
f", prereleases={self.prereleases!r}"
|
| 304 |
+
if self._prereleases is not None
|
| 305 |
+
else ""
|
| 306 |
+
)
|
| 307 |
+
|
| 308 |
+
return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
|
| 309 |
+
|
| 310 |
+
def __str__(self) -> str:
|
| 311 |
+
"""A string representation of the Specifier that can be round-tripped.
|
| 312 |
+
|
| 313 |
+
>>> str(Specifier('>=1.0.0'))
|
| 314 |
+
'>=1.0.0'
|
| 315 |
+
>>> str(Specifier('>=1.0.0', prereleases=False))
|
| 316 |
+
'>=1.0.0'
|
| 317 |
+
"""
|
| 318 |
+
return "{}{}".format(*self._spec)
|
| 319 |
+
|
| 320 |
+
@property
|
| 321 |
+
def _canonical_spec(self) -> Tuple[str, str]:
|
| 322 |
+
canonical_version = canonicalize_version(
|
| 323 |
+
self._spec[1],
|
| 324 |
+
strip_trailing_zero=(self._spec[0] != "~="),
|
| 325 |
+
)
|
| 326 |
+
return self._spec[0], canonical_version
|
| 327 |
+
|
| 328 |
+
def __hash__(self) -> int:
|
| 329 |
+
return hash(self._canonical_spec)
|
| 330 |
+
|
| 331 |
+
def __eq__(self, other: object) -> bool:
|
| 332 |
+
"""Whether or not the two Specifier-like objects are equal.
|
| 333 |
+
|
| 334 |
+
:param other: The other object to check against.
|
| 335 |
+
|
| 336 |
+
The value of :attr:`prereleases` is ignored.
|
| 337 |
+
|
| 338 |
+
>>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
|
| 339 |
+
True
|
| 340 |
+
>>> (Specifier("==1.2.3", prereleases=False) ==
|
| 341 |
+
... Specifier("==1.2.3", prereleases=True))
|
| 342 |
+
True
|
| 343 |
+
>>> Specifier("==1.2.3") == "==1.2.3"
|
| 344 |
+
True
|
| 345 |
+
>>> Specifier("==1.2.3") == Specifier("==1.2.4")
|
| 346 |
+
False
|
| 347 |
+
>>> Specifier("==1.2.3") == Specifier("~=1.2.3")
|
| 348 |
+
False
|
| 349 |
+
"""
|
| 350 |
+
if isinstance(other, str):
|
| 351 |
+
try:
|
| 352 |
+
other = self.__class__(str(other))
|
| 353 |
+
except InvalidSpecifier:
|
| 354 |
+
return NotImplemented
|
| 355 |
+
elif not isinstance(other, self.__class__):
|
| 356 |
+
return NotImplemented
|
| 357 |
+
|
| 358 |
+
return self._canonical_spec == other._canonical_spec
|
| 359 |
+
|
| 360 |
+
def _get_operator(self, op: str) -> CallableOperator:
|
| 361 |
+
operator_callable: CallableOperator = getattr(
|
| 362 |
+
self, f"_compare_{self._operators[op]}"
|
| 363 |
+
)
|
| 364 |
+
return operator_callable
|
| 365 |
+
|
| 366 |
+
def _compare_compatible(self, prospective: Version, spec: str) -> bool:
|
| 367 |
+
# Compatible releases have an equivalent combination of >= and ==. That
|
| 368 |
+
# is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
|
| 369 |
+
# implement this in terms of the other specifiers instead of
|
| 370 |
+
# implementing it ourselves. The only thing we need to do is construct
|
| 371 |
+
# the other specifiers.
|
| 372 |
+
|
| 373 |
+
# We want everything but the last item in the version, but we want to
|
| 374 |
+
# ignore suffix segments.
|
| 375 |
+
prefix = _version_join(
|
| 376 |
+
list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
|
| 377 |
+
)
|
| 378 |
+
|
| 379 |
+
# Add the prefix notation to the end of our string
|
| 380 |
+
prefix += ".*"
|
| 381 |
+
|
| 382 |
+
return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
|
| 383 |
+
prospective, prefix
|
| 384 |
+
)
|
| 385 |
+
|
| 386 |
+
def _compare_equal(self, prospective: Version, spec: str) -> bool:
|
| 387 |
+
# We need special logic to handle prefix matching
|
| 388 |
+
if spec.endswith(".*"):
|
| 389 |
+
# In the case of prefix matching we want to ignore local segment.
|
| 390 |
+
normalized_prospective = canonicalize_version(
|
| 391 |
+
prospective.public, strip_trailing_zero=False
|
| 392 |
+
)
|
| 393 |
+
# Get the normalized version string ignoring the trailing .*
|
| 394 |
+
normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
|
| 395 |
+
# Split the spec out by bangs and dots, and pretend that there is
|
| 396 |
+
# an implicit dot in between a release segment and a pre-release segment.
|
| 397 |
+
split_spec = _version_split(normalized_spec)
|
| 398 |
+
|
| 399 |
+
# Split the prospective version out by bangs and dots, and pretend
|
| 400 |
+
# that there is an implicit dot in between a release segment and
|
| 401 |
+
# a pre-release segment.
|
| 402 |
+
split_prospective = _version_split(normalized_prospective)
|
| 403 |
+
|
| 404 |
+
# 0-pad the prospective version before shortening it to get the correct
|
| 405 |
+
# shortened version.
|
| 406 |
+
padded_prospective, _ = _pad_version(split_prospective, split_spec)
|
| 407 |
+
|
| 408 |
+
# Shorten the prospective version to be the same length as the spec
|
| 409 |
+
# so that we can determine if the specifier is a prefix of the
|
| 410 |
+
# prospective version or not.
|
| 411 |
+
shortened_prospective = padded_prospective[: len(split_spec)]
|
| 412 |
+
|
| 413 |
+
return shortened_prospective == split_spec
|
| 414 |
+
else:
|
| 415 |
+
# Convert our spec string into a Version
|
| 416 |
+
spec_version = Version(spec)
|
| 417 |
+
|
| 418 |
+
# If the specifier does not have a local segment, then we want to
|
| 419 |
+
# act as if the prospective version also does not have a local
|
| 420 |
+
# segment.
|
| 421 |
+
if not spec_version.local:
|
| 422 |
+
prospective = Version(prospective.public)
|
| 423 |
+
|
| 424 |
+
return prospective == spec_version
|
| 425 |
+
|
| 426 |
+
def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
|
| 427 |
+
return not self._compare_equal(prospective, spec)
|
| 428 |
+
|
| 429 |
+
def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
|
| 430 |
+
# NB: Local version identifiers are NOT permitted in the version
|
| 431 |
+
# specifier, so local version labels can be universally removed from
|
| 432 |
+
# the prospective version.
|
| 433 |
+
return Version(prospective.public) <= Version(spec)
|
| 434 |
+
|
| 435 |
+
def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
|
| 436 |
+
# NB: Local version identifiers are NOT permitted in the version
|
| 437 |
+
# specifier, so local version labels can be universally removed from
|
| 438 |
+
# the prospective version.
|
| 439 |
+
return Version(prospective.public) >= Version(spec)
|
| 440 |
+
|
| 441 |
+
def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
|
| 442 |
+
# Convert our spec to a Version instance, since we'll want to work with
|
| 443 |
+
# it as a version.
|
| 444 |
+
spec = Version(spec_str)
|
| 445 |
+
|
| 446 |
+
# Check to see if the prospective version is less than the spec
|
| 447 |
+
# version. If it's not we can short circuit and just return False now
|
| 448 |
+
# instead of doing extra unneeded work.
|
| 449 |
+
if not prospective < spec:
|
| 450 |
+
return False
|
| 451 |
+
|
| 452 |
+
# This special case is here so that, unless the specifier itself
|
| 453 |
+
# includes is a pre-release version, that we do not accept pre-release
|
| 454 |
+
# versions for the version mentioned in the specifier (e.g. <3.1 should
|
| 455 |
+
# not match 3.1.dev0, but should match 3.0.dev0).
|
| 456 |
+
if not spec.is_prerelease and prospective.is_prerelease:
|
| 457 |
+
if Version(prospective.base_version) == Version(spec.base_version):
|
| 458 |
+
return False
|
| 459 |
+
|
| 460 |
+
# If we've gotten to here, it means that prospective version is both
|
| 461 |
+
# less than the spec version *and* it's not a pre-release of the same
|
| 462 |
+
# version in the spec.
|
| 463 |
+
return True
|
| 464 |
+
|
| 465 |
+
def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
|
| 466 |
+
# Convert our spec to a Version instance, since we'll want to work with
|
| 467 |
+
# it as a version.
|
| 468 |
+
spec = Version(spec_str)
|
| 469 |
+
|
| 470 |
+
# Check to see if the prospective version is greater than the spec
|
| 471 |
+
# version. If it's not we can short circuit and just return False now
|
| 472 |
+
# instead of doing extra unneeded work.
|
| 473 |
+
if not prospective > spec:
|
| 474 |
+
return False
|
| 475 |
+
|
| 476 |
+
# This special case is here so that, unless the specifier itself
|
| 477 |
+
# includes is a post-release version, that we do not accept
|
| 478 |
+
# post-release versions for the version mentioned in the specifier
|
| 479 |
+
# (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
|
| 480 |
+
if not spec.is_postrelease and prospective.is_postrelease:
|
| 481 |
+
if Version(prospective.base_version) == Version(spec.base_version):
|
| 482 |
+
return False
|
| 483 |
+
|
| 484 |
+
# Ensure that we do not allow a local version of the version mentioned
|
| 485 |
+
# in the specifier, which is technically greater than, to match.
|
| 486 |
+
if prospective.local is not None:
|
| 487 |
+
if Version(prospective.base_version) == Version(spec.base_version):
|
| 488 |
+
return False
|
| 489 |
+
|
| 490 |
+
# If we've gotten to here, it means that prospective version is both
|
| 491 |
+
# greater than the spec version *and* it's not a pre-release of the
|
| 492 |
+
# same version in the spec.
|
| 493 |
+
return True
|
| 494 |
+
|
| 495 |
+
def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
|
| 496 |
+
return str(prospective).lower() == str(spec).lower()
|
| 497 |
+
|
| 498 |
+
def __contains__(self, item: Union[str, Version]) -> bool:
|
| 499 |
+
"""Return whether or not the item is contained in this specifier.
|
| 500 |
+
|
| 501 |
+
:param item: The item to check for.
|
| 502 |
+
|
| 503 |
+
This is used for the ``in`` operator and behaves the same as
|
| 504 |
+
:meth:`contains` with no ``prereleases`` argument passed.
|
| 505 |
+
|
| 506 |
+
>>> "1.2.3" in Specifier(">=1.2.3")
|
| 507 |
+
True
|
| 508 |
+
>>> Version("1.2.3") in Specifier(">=1.2.3")
|
| 509 |
+
True
|
| 510 |
+
>>> "1.0.0" in Specifier(">=1.2.3")
|
| 511 |
+
False
|
| 512 |
+
>>> "1.3.0a1" in Specifier(">=1.2.3")
|
| 513 |
+
False
|
| 514 |
+
>>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
|
| 515 |
+
True
|
| 516 |
+
"""
|
| 517 |
+
return self.contains(item)
|
| 518 |
+
|
| 519 |
+
def contains(
|
| 520 |
+
self, item: UnparsedVersion, prereleases: Optional[bool] = None
|
| 521 |
+
) -> bool:
|
| 522 |
+
"""Return whether or not the item is contained in this specifier.
|
| 523 |
+
|
| 524 |
+
:param item:
|
| 525 |
+
The item to check for, which can be a version string or a
|
| 526 |
+
:class:`Version` instance.
|
| 527 |
+
:param prereleases:
|
| 528 |
+
Whether or not to match prereleases with this Specifier. If set to
|
| 529 |
+
``None`` (the default), it uses :attr:`prereleases` to determine
|
| 530 |
+
whether or not prereleases are allowed.
|
| 531 |
+
|
| 532 |
+
>>> Specifier(">=1.2.3").contains("1.2.3")
|
| 533 |
+
True
|
| 534 |
+
>>> Specifier(">=1.2.3").contains(Version("1.2.3"))
|
| 535 |
+
True
|
| 536 |
+
>>> Specifier(">=1.2.3").contains("1.0.0")
|
| 537 |
+
False
|
| 538 |
+
>>> Specifier(">=1.2.3").contains("1.3.0a1")
|
| 539 |
+
False
|
| 540 |
+
>>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
|
| 541 |
+
True
|
| 542 |
+
>>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
|
| 543 |
+
True
|
| 544 |
+
"""
|
| 545 |
+
|
| 546 |
+
# Determine if prereleases are to be allowed or not.
|
| 547 |
+
if prereleases is None:
|
| 548 |
+
prereleases = self.prereleases
|
| 549 |
+
|
| 550 |
+
# Normalize item to a Version, this allows us to have a shortcut for
|
| 551 |
+
# "2.0" in Specifier(">=2")
|
| 552 |
+
normalized_item = _coerce_version(item)
|
| 553 |
+
|
| 554 |
+
# Determine if we should be supporting prereleases in this specifier
|
| 555 |
+
# or not, if we do not support prereleases than we can short circuit
|
| 556 |
+
# logic if this version is a prereleases.
|
| 557 |
+
if normalized_item.is_prerelease and not prereleases:
|
| 558 |
+
return False
|
| 559 |
+
|
| 560 |
+
# Actually do the comparison to determine if this item is contained
|
| 561 |
+
# within this Specifier or not.
|
| 562 |
+
operator_callable: CallableOperator = self._get_operator(self.operator)
|
| 563 |
+
return operator_callable(normalized_item, self.version)
|
| 564 |
+
|
| 565 |
+
def filter(
|
| 566 |
+
self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
|
| 567 |
+
) -> Iterator[UnparsedVersionVar]:
|
| 568 |
+
"""Filter items in the given iterable, that match the specifier.
|
| 569 |
+
|
| 570 |
+
:param iterable:
|
| 571 |
+
An iterable that can contain version strings and :class:`Version` instances.
|
| 572 |
+
The items in the iterable will be filtered according to the specifier.
|
| 573 |
+
:param prereleases:
|
| 574 |
+
Whether or not to allow prereleases in the returned iterator. If set to
|
| 575 |
+
``None`` (the default), it will be intelligently decide whether to allow
|
| 576 |
+
prereleases or not (based on the :attr:`prereleases` attribute, and
|
| 577 |
+
whether the only versions matching are prereleases).
|
| 578 |
+
|
| 579 |
+
This method is smarter than just ``filter(Specifier().contains, [...])``
|
| 580 |
+
because it implements the rule from :pep:`440` that a prerelease item
|
| 581 |
+
SHOULD be accepted if no other versions match the given specifier.
|
| 582 |
+
|
| 583 |
+
>>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
|
| 584 |
+
['1.3']
|
| 585 |
+
>>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
|
| 586 |
+
['1.2.3', '1.3', <Version('1.4')>]
|
| 587 |
+
>>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
|
| 588 |
+
['1.5a1']
|
| 589 |
+
>>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
|
| 590 |
+
['1.3', '1.5a1']
|
| 591 |
+
>>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
|
| 592 |
+
['1.3', '1.5a1']
|
| 593 |
+
"""
|
| 594 |
+
|
| 595 |
+
yielded = False
|
| 596 |
+
found_prereleases = []
|
| 597 |
+
|
| 598 |
+
kw = {"prereleases": prereleases if prereleases is not None else True}
|
| 599 |
+
|
| 600 |
+
# Attempt to iterate over all the values in the iterable and if any of
|
| 601 |
+
# them match, yield them.
|
| 602 |
+
for version in iterable:
|
| 603 |
+
parsed_version = _coerce_version(version)
|
| 604 |
+
|
| 605 |
+
if self.contains(parsed_version, **kw):
|
| 606 |
+
# If our version is a prerelease, and we were not set to allow
|
| 607 |
+
# prereleases, then we'll store it for later in case nothing
|
| 608 |
+
# else matches this specifier.
|
| 609 |
+
if parsed_version.is_prerelease and not (
|
| 610 |
+
prereleases or self.prereleases
|
| 611 |
+
):
|
| 612 |
+
found_prereleases.append(version)
|
| 613 |
+
# Either this is not a prerelease, or we should have been
|
| 614 |
+
# accepting prereleases from the beginning.
|
| 615 |
+
else:
|
| 616 |
+
yielded = True
|
| 617 |
+
yield version
|
| 618 |
+
|
| 619 |
+
# Now that we've iterated over everything, determine if we've yielded
|
| 620 |
+
# any values, and if we have not and we have any prereleases stored up
|
| 621 |
+
# then we will go ahead and yield the prereleases.
|
| 622 |
+
if not yielded and found_prereleases:
|
| 623 |
+
for version in found_prereleases:
|
| 624 |
+
yield version
|
| 625 |
+
|
| 626 |
+
|
| 627 |
+
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
|
| 628 |
+
|
| 629 |
+
|
| 630 |
+
def _version_split(version: str) -> List[str]:
|
| 631 |
+
"""Split version into components.
|
| 632 |
+
|
| 633 |
+
The split components are intended for version comparison. The logic does
|
| 634 |
+
not attempt to retain the original version string, so joining the
|
| 635 |
+
components back with :func:`_version_join` may not produce the original
|
| 636 |
+
version string.
|
| 637 |
+
"""
|
| 638 |
+
result: List[str] = []
|
| 639 |
+
|
| 640 |
+
epoch, _, rest = version.rpartition("!")
|
| 641 |
+
result.append(epoch or "0")
|
| 642 |
+
|
| 643 |
+
for item in rest.split("."):
|
| 644 |
+
match = _prefix_regex.search(item)
|
| 645 |
+
if match:
|
| 646 |
+
result.extend(match.groups())
|
| 647 |
+
else:
|
| 648 |
+
result.append(item)
|
| 649 |
+
return result
|
| 650 |
+
|
| 651 |
+
|
| 652 |
+
def _version_join(components: List[str]) -> str:
|
| 653 |
+
"""Join split version components into a version string.
|
| 654 |
+
|
| 655 |
+
This function assumes the input came from :func:`_version_split`, where the
|
| 656 |
+
first component must be the epoch (either empty or numeric), and all other
|
| 657 |
+
components numeric.
|
| 658 |
+
"""
|
| 659 |
+
epoch, *rest = components
|
| 660 |
+
return f"{epoch}!{'.'.join(rest)}"
|
| 661 |
+
|
| 662 |
+
|
| 663 |
+
def _is_not_suffix(segment: str) -> bool:
|
| 664 |
+
return not any(
|
| 665 |
+
segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
|
| 666 |
+
)
|
| 667 |
+
|
| 668 |
+
|
| 669 |
+
def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
|
| 670 |
+
left_split, right_split = [], []
|
| 671 |
+
|
| 672 |
+
# Get the release segment of our versions
|
| 673 |
+
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
|
| 674 |
+
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
|
| 675 |
+
|
| 676 |
+
# Get the rest of our versions
|
| 677 |
+
left_split.append(left[len(left_split[0]) :])
|
| 678 |
+
right_split.append(right[len(right_split[0]) :])
|
| 679 |
+
|
| 680 |
+
# Insert our padding
|
| 681 |
+
left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
|
| 682 |
+
right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
|
| 683 |
+
|
| 684 |
+
return (
|
| 685 |
+
list(itertools.chain.from_iterable(left_split)),
|
| 686 |
+
list(itertools.chain.from_iterable(right_split)),
|
| 687 |
+
)
|
| 688 |
+
|
| 689 |
+
|
| 690 |
+
class SpecifierSet(BaseSpecifier):
|
| 691 |
+
"""This class abstracts handling of a set of version specifiers.
|
| 692 |
+
|
| 693 |
+
It can be passed a single specifier (``>=3.0``), a comma-separated list of
|
| 694 |
+
specifiers (``>=3.0,!=3.1``), or no specifier at all.
|
| 695 |
+
"""
|
| 696 |
+
|
| 697 |
+
def __init__(
|
| 698 |
+
self, specifiers: str = "", prereleases: Optional[bool] = None
|
| 699 |
+
) -> None:
|
| 700 |
+
"""Initialize a SpecifierSet instance.
|
| 701 |
+
|
| 702 |
+
:param specifiers:
|
| 703 |
+
The string representation of a specifier or a comma-separated list of
|
| 704 |
+
specifiers which will be parsed and normalized before use.
|
| 705 |
+
:param prereleases:
|
| 706 |
+
This tells the SpecifierSet if it should accept prerelease versions if
|
| 707 |
+
applicable or not. The default of ``None`` will autodetect it from the
|
| 708 |
+
given specifiers.
|
| 709 |
+
|
| 710 |
+
:raises InvalidSpecifier:
|
| 711 |
+
If the given ``specifiers`` are not parseable than this exception will be
|
| 712 |
+
raised.
|
| 713 |
+
"""
|
| 714 |
+
|
| 715 |
+
# Split on `,` to break each individual specifier into it's own item, and
|
| 716 |
+
# strip each item to remove leading/trailing whitespace.
|
| 717 |
+
split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
|
| 718 |
+
|
| 719 |
+
# Make each individual specifier a Specifier and save in a frozen set for later.
|
| 720 |
+
self._specs = frozenset(map(Specifier, split_specifiers))
|
| 721 |
+
|
| 722 |
+
# Store our prereleases value so we can use it later to determine if
|
| 723 |
+
# we accept prereleases or not.
|
| 724 |
+
self._prereleases = prereleases
|
| 725 |
+
|
| 726 |
+
@property
|
| 727 |
+
def prereleases(self) -> Optional[bool]:
|
| 728 |
+
# If we have been given an explicit prerelease modifier, then we'll
|
| 729 |
+
# pass that through here.
|
| 730 |
+
if self._prereleases is not None:
|
| 731 |
+
return self._prereleases
|
| 732 |
+
|
| 733 |
+
# If we don't have any specifiers, and we don't have a forced value,
|
| 734 |
+
# then we'll just return None since we don't know if this should have
|
| 735 |
+
# pre-releases or not.
|
| 736 |
+
if not self._specs:
|
| 737 |
+
return None
|
| 738 |
+
|
| 739 |
+
# Otherwise we'll see if any of the given specifiers accept
|
| 740 |
+
# prereleases, if any of them do we'll return True, otherwise False.
|
| 741 |
+
return any(s.prereleases for s in self._specs)
|
| 742 |
+
|
| 743 |
+
@prereleases.setter
|
| 744 |
+
def prereleases(self, value: bool) -> None:
|
| 745 |
+
self._prereleases = value
|
| 746 |
+
|
| 747 |
+
def __repr__(self) -> str:
|
| 748 |
+
"""A representation of the specifier set that shows all internal state.
|
| 749 |
+
|
| 750 |
+
Note that the ordering of the individual specifiers within the set may not
|
| 751 |
+
match the input string.
|
| 752 |
+
|
| 753 |
+
>>> SpecifierSet('>=1.0.0,!=2.0.0')
|
| 754 |
+
<SpecifierSet('!=2.0.0,>=1.0.0')>
|
| 755 |
+
>>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
|
| 756 |
+
<SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
|
| 757 |
+
>>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
|
| 758 |
+
<SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
|
| 759 |
+
"""
|
| 760 |
+
pre = (
|
| 761 |
+
f", prereleases={self.prereleases!r}"
|
| 762 |
+
if self._prereleases is not None
|
| 763 |
+
else ""
|
| 764 |
+
)
|
| 765 |
+
|
| 766 |
+
return f"<SpecifierSet({str(self)!r}{pre})>"
|
| 767 |
+
|
| 768 |
+
def __str__(self) -> str:
|
| 769 |
+
"""A string representation of the specifier set that can be round-tripped.
|
| 770 |
+
|
| 771 |
+
Note that the ordering of the individual specifiers within the set may not
|
| 772 |
+
match the input string.
|
| 773 |
+
|
| 774 |
+
>>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
|
| 775 |
+
'!=1.0.1,>=1.0.0'
|
| 776 |
+
>>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
|
| 777 |
+
'!=1.0.1,>=1.0.0'
|
| 778 |
+
"""
|
| 779 |
+
return ",".join(sorted(str(s) for s in self._specs))
|
| 780 |
+
|
| 781 |
+
def __hash__(self) -> int:
|
| 782 |
+
return hash(self._specs)
|
| 783 |
+
|
| 784 |
+
def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
|
| 785 |
+
"""Return a SpecifierSet which is a combination of the two sets.
|
| 786 |
+
|
| 787 |
+
:param other: The other object to combine with.
|
| 788 |
+
|
| 789 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
|
| 790 |
+
<SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
|
| 791 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
|
| 792 |
+
<SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
|
| 793 |
+
"""
|
| 794 |
+
if isinstance(other, str):
|
| 795 |
+
other = SpecifierSet(other)
|
| 796 |
+
elif not isinstance(other, SpecifierSet):
|
| 797 |
+
return NotImplemented
|
| 798 |
+
|
| 799 |
+
specifier = SpecifierSet()
|
| 800 |
+
specifier._specs = frozenset(self._specs | other._specs)
|
| 801 |
+
|
| 802 |
+
if self._prereleases is None and other._prereleases is not None:
|
| 803 |
+
specifier._prereleases = other._prereleases
|
| 804 |
+
elif self._prereleases is not None and other._prereleases is None:
|
| 805 |
+
specifier._prereleases = self._prereleases
|
| 806 |
+
elif self._prereleases == other._prereleases:
|
| 807 |
+
specifier._prereleases = self._prereleases
|
| 808 |
+
else:
|
| 809 |
+
raise ValueError(
|
| 810 |
+
"Cannot combine SpecifierSets with True and False prerelease "
|
| 811 |
+
"overrides."
|
| 812 |
+
)
|
| 813 |
+
|
| 814 |
+
return specifier
|
| 815 |
+
|
| 816 |
+
def __eq__(self, other: object) -> bool:
|
| 817 |
+
"""Whether or not the two SpecifierSet-like objects are equal.
|
| 818 |
+
|
| 819 |
+
:param other: The other object to check against.
|
| 820 |
+
|
| 821 |
+
The value of :attr:`prereleases` is ignored.
|
| 822 |
+
|
| 823 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
|
| 824 |
+
True
|
| 825 |
+
>>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
|
| 826 |
+
... SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
|
| 827 |
+
True
|
| 828 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
|
| 829 |
+
True
|
| 830 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
|
| 831 |
+
False
|
| 832 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
|
| 833 |
+
False
|
| 834 |
+
"""
|
| 835 |
+
if isinstance(other, (str, Specifier)):
|
| 836 |
+
other = SpecifierSet(str(other))
|
| 837 |
+
elif not isinstance(other, SpecifierSet):
|
| 838 |
+
return NotImplemented
|
| 839 |
+
|
| 840 |
+
return self._specs == other._specs
|
| 841 |
+
|
| 842 |
+
def __len__(self) -> int:
|
| 843 |
+
"""Returns the number of specifiers in this specifier set."""
|
| 844 |
+
return len(self._specs)
|
| 845 |
+
|
| 846 |
+
def __iter__(self) -> Iterator[Specifier]:
|
| 847 |
+
"""
|
| 848 |
+
Returns an iterator over all the underlying :class:`Specifier` instances
|
| 849 |
+
in this specifier set.
|
| 850 |
+
|
| 851 |
+
>>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
|
| 852 |
+
[<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
|
| 853 |
+
"""
|
| 854 |
+
return iter(self._specs)
|
| 855 |
+
|
| 856 |
+
def __contains__(self, item: UnparsedVersion) -> bool:
|
| 857 |
+
"""Return whether or not the item is contained in this specifier.
|
| 858 |
+
|
| 859 |
+
:param item: The item to check for.
|
| 860 |
+
|
| 861 |
+
This is used for the ``in`` operator and behaves the same as
|
| 862 |
+
:meth:`contains` with no ``prereleases`` argument passed.
|
| 863 |
+
|
| 864 |
+
>>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
|
| 865 |
+
True
|
| 866 |
+
>>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
|
| 867 |
+
True
|
| 868 |
+
>>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
|
| 869 |
+
False
|
| 870 |
+
>>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
|
| 871 |
+
False
|
| 872 |
+
>>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
|
| 873 |
+
True
|
| 874 |
+
"""
|
| 875 |
+
return self.contains(item)
|
| 876 |
+
|
| 877 |
+
def contains(
|
| 878 |
+
self,
|
| 879 |
+
item: UnparsedVersion,
|
| 880 |
+
prereleases: Optional[bool] = None,
|
| 881 |
+
installed: Optional[bool] = None,
|
| 882 |
+
) -> bool:
|
| 883 |
+
"""Return whether or not the item is contained in this SpecifierSet.
|
| 884 |
+
|
| 885 |
+
:param item:
|
| 886 |
+
The item to check for, which can be a version string or a
|
| 887 |
+
:class:`Version` instance.
|
| 888 |
+
:param prereleases:
|
| 889 |
+
Whether or not to match prereleases with this SpecifierSet. If set to
|
| 890 |
+
``None`` (the default), it uses :attr:`prereleases` to determine
|
| 891 |
+
whether or not prereleases are allowed.
|
| 892 |
+
|
| 893 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
|
| 894 |
+
True
|
| 895 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
|
| 896 |
+
True
|
| 897 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
|
| 898 |
+
False
|
| 899 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
|
| 900 |
+
False
|
| 901 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
|
| 902 |
+
True
|
| 903 |
+
>>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
|
| 904 |
+
True
|
| 905 |
+
"""
|
| 906 |
+
# Ensure that our item is a Version instance.
|
| 907 |
+
if not isinstance(item, Version):
|
| 908 |
+
item = Version(item)
|
| 909 |
+
|
| 910 |
+
# Determine if we're forcing a prerelease or not, if we're not forcing
|
| 911 |
+
# one for this particular filter call, then we'll use whatever the
|
| 912 |
+
# SpecifierSet thinks for whether or not we should support prereleases.
|
| 913 |
+
if prereleases is None:
|
| 914 |
+
prereleases = self.prereleases
|
| 915 |
+
|
| 916 |
+
# We can determine if we're going to allow pre-releases by looking to
|
| 917 |
+
# see if any of the underlying items supports them. If none of them do
|
| 918 |
+
# and this item is a pre-release then we do not allow it and we can
|
| 919 |
+
# short circuit that here.
|
| 920 |
+
# Note: This means that 1.0.dev1 would not be contained in something
|
| 921 |
+
# like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
|
| 922 |
+
if not prereleases and item.is_prerelease:
|
| 923 |
+
return False
|
| 924 |
+
|
| 925 |
+
if installed and item.is_prerelease:
|
| 926 |
+
item = Version(item.base_version)
|
| 927 |
+
|
| 928 |
+
# We simply dispatch to the underlying specs here to make sure that the
|
| 929 |
+
# given version is contained within all of them.
|
| 930 |
+
# Note: This use of all() here means that an empty set of specifiers
|
| 931 |
+
# will always return True, this is an explicit design decision.
|
| 932 |
+
return all(s.contains(item, prereleases=prereleases) for s in self._specs)
|
| 933 |
+
|
| 934 |
+
def filter(
|
| 935 |
+
self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
|
| 936 |
+
) -> Iterator[UnparsedVersionVar]:
|
| 937 |
+
"""Filter items in the given iterable, that match the specifiers in this set.
|
| 938 |
+
|
| 939 |
+
:param iterable:
|
| 940 |
+
An iterable that can contain version strings and :class:`Version` instances.
|
| 941 |
+
The items in the iterable will be filtered according to the specifier.
|
| 942 |
+
:param prereleases:
|
| 943 |
+
Whether or not to allow prereleases in the returned iterator. If set to
|
| 944 |
+
``None`` (the default), it will be intelligently decide whether to allow
|
| 945 |
+
prereleases or not (based on the :attr:`prereleases` attribute, and
|
| 946 |
+
whether the only versions matching are prereleases).
|
| 947 |
+
|
| 948 |
+
This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
|
| 949 |
+
because it implements the rule from :pep:`440` that a prerelease item
|
| 950 |
+
SHOULD be accepted if no other versions match the given specifier.
|
| 951 |
+
|
| 952 |
+
>>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
|
| 953 |
+
['1.3']
|
| 954 |
+
>>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
|
| 955 |
+
['1.3', <Version('1.4')>]
|
| 956 |
+
>>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
|
| 957 |
+
[]
|
| 958 |
+
>>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
|
| 959 |
+
['1.3', '1.5a1']
|
| 960 |
+
>>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
|
| 961 |
+
['1.3', '1.5a1']
|
| 962 |
+
|
| 963 |
+
An "empty" SpecifierSet will filter items based on the presence of prerelease
|
| 964 |
+
versions in the set.
|
| 965 |
+
|
| 966 |
+
>>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
|
| 967 |
+
['1.3']
|
| 968 |
+
>>> list(SpecifierSet("").filter(["1.5a1"]))
|
| 969 |
+
['1.5a1']
|
| 970 |
+
>>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
|
| 971 |
+
['1.3', '1.5a1']
|
| 972 |
+
>>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
|
| 973 |
+
['1.3', '1.5a1']
|
| 974 |
+
"""
|
| 975 |
+
# Determine if we're forcing a prerelease or not, if we're not forcing
|
| 976 |
+
# one for this particular filter call, then we'll use whatever the
|
| 977 |
+
# SpecifierSet thinks for whether or not we should support prereleases.
|
| 978 |
+
if prereleases is None:
|
| 979 |
+
prereleases = self.prereleases
|
| 980 |
+
|
| 981 |
+
# If we have any specifiers, then we want to wrap our iterable in the
|
| 982 |
+
# filter method for each one, this will act as a logical AND amongst
|
| 983 |
+
# each specifier.
|
| 984 |
+
if self._specs:
|
| 985 |
+
for spec in self._specs:
|
| 986 |
+
iterable = spec.filter(iterable, prereleases=bool(prereleases))
|
| 987 |
+
return iter(iterable)
|
| 988 |
+
# If we do not have any specifiers, then we need to have a rough filter
|
| 989 |
+
# which will filter out any pre-releases, unless there are no final
|
| 990 |
+
# releases.
|
| 991 |
+
else:
|
| 992 |
+
filtered: List[UnparsedVersionVar] = []
|
| 993 |
+
found_prereleases: List[UnparsedVersionVar] = []
|
| 994 |
+
|
| 995 |
+
for item in iterable:
|
| 996 |
+
parsed_version = _coerce_version(item)
|
| 997 |
+
|
| 998 |
+
# Store any item which is a pre-release for later unless we've
|
| 999 |
+
# already found a final version or we are accepting prereleases
|
| 1000 |
+
if parsed_version.is_prerelease and not prereleases:
|
| 1001 |
+
if not filtered:
|
| 1002 |
+
found_prereleases.append(item)
|
| 1003 |
+
else:
|
| 1004 |
+
filtered.append(item)
|
| 1005 |
+
|
| 1006 |
+
# If we've found no items except for pre-releases, then we'll go
|
| 1007 |
+
# ahead and use the pre-releases
|
| 1008 |
+
if not filtered and found_prereleases and prereleases is None:
|
| 1009 |
+
return iter(found_prereleases)
|
| 1010 |
+
|
| 1011 |
+
return iter(filtered)
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/tags.py
ADDED
|
@@ -0,0 +1,571 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
import logging
|
| 6 |
+
import platform
|
| 7 |
+
import re
|
| 8 |
+
import struct
|
| 9 |
+
import subprocess
|
| 10 |
+
import sys
|
| 11 |
+
import sysconfig
|
| 12 |
+
from importlib.machinery import EXTENSION_SUFFIXES
|
| 13 |
+
from typing import (
|
| 14 |
+
Dict,
|
| 15 |
+
FrozenSet,
|
| 16 |
+
Iterable,
|
| 17 |
+
Iterator,
|
| 18 |
+
List,
|
| 19 |
+
Optional,
|
| 20 |
+
Sequence,
|
| 21 |
+
Tuple,
|
| 22 |
+
Union,
|
| 23 |
+
cast,
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
from . import _manylinux, _musllinux
|
| 27 |
+
|
| 28 |
+
logger = logging.getLogger(__name__)
|
| 29 |
+
|
| 30 |
+
PythonVersion = Sequence[int]
|
| 31 |
+
MacVersion = Tuple[int, int]
|
| 32 |
+
|
| 33 |
+
INTERPRETER_SHORT_NAMES: Dict[str, str] = {
|
| 34 |
+
"python": "py", # Generic.
|
| 35 |
+
"cpython": "cp",
|
| 36 |
+
"pypy": "pp",
|
| 37 |
+
"ironpython": "ip",
|
| 38 |
+
"jython": "jy",
|
| 39 |
+
}
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
_32_BIT_INTERPRETER = struct.calcsize("P") == 4
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class Tag:
|
| 46 |
+
"""
|
| 47 |
+
A representation of the tag triple for a wheel.
|
| 48 |
+
|
| 49 |
+
Instances are considered immutable and thus are hashable. Equality checking
|
| 50 |
+
is also supported.
|
| 51 |
+
"""
|
| 52 |
+
|
| 53 |
+
__slots__ = ["_interpreter", "_abi", "_platform", "_hash"]
|
| 54 |
+
|
| 55 |
+
def __init__(self, interpreter: str, abi: str, platform: str) -> None:
|
| 56 |
+
self._interpreter = interpreter.lower()
|
| 57 |
+
self._abi = abi.lower()
|
| 58 |
+
self._platform = platform.lower()
|
| 59 |
+
# The __hash__ of every single element in a Set[Tag] will be evaluated each time
|
| 60 |
+
# that a set calls its `.disjoint()` method, which may be called hundreds of
|
| 61 |
+
# times when scanning a page of links for packages with tags matching that
|
| 62 |
+
# Set[Tag]. Pre-computing the value here produces significant speedups for
|
| 63 |
+
# downstream consumers.
|
| 64 |
+
self._hash = hash((self._interpreter, self._abi, self._platform))
|
| 65 |
+
|
| 66 |
+
@property
|
| 67 |
+
def interpreter(self) -> str:
|
| 68 |
+
return self._interpreter
|
| 69 |
+
|
| 70 |
+
@property
|
| 71 |
+
def abi(self) -> str:
|
| 72 |
+
return self._abi
|
| 73 |
+
|
| 74 |
+
@property
|
| 75 |
+
def platform(self) -> str:
|
| 76 |
+
return self._platform
|
| 77 |
+
|
| 78 |
+
def __eq__(self, other: object) -> bool:
|
| 79 |
+
if not isinstance(other, Tag):
|
| 80 |
+
return NotImplemented
|
| 81 |
+
|
| 82 |
+
return (
|
| 83 |
+
(self._hash == other._hash) # Short-circuit ASAP for perf reasons.
|
| 84 |
+
and (self._platform == other._platform)
|
| 85 |
+
and (self._abi == other._abi)
|
| 86 |
+
and (self._interpreter == other._interpreter)
|
| 87 |
+
)
|
| 88 |
+
|
| 89 |
+
def __hash__(self) -> int:
|
| 90 |
+
return self._hash
|
| 91 |
+
|
| 92 |
+
def __str__(self) -> str:
|
| 93 |
+
return f"{self._interpreter}-{self._abi}-{self._platform}"
|
| 94 |
+
|
| 95 |
+
def __repr__(self) -> str:
|
| 96 |
+
return f"<{self} @ {id(self)}>"
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def parse_tag(tag: str) -> FrozenSet[Tag]:
|
| 100 |
+
"""
|
| 101 |
+
Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
|
| 102 |
+
|
| 103 |
+
Returning a set is required due to the possibility that the tag is a
|
| 104 |
+
compressed tag set.
|
| 105 |
+
"""
|
| 106 |
+
tags = set()
|
| 107 |
+
interpreters, abis, platforms = tag.split("-")
|
| 108 |
+
for interpreter in interpreters.split("."):
|
| 109 |
+
for abi in abis.split("."):
|
| 110 |
+
for platform_ in platforms.split("."):
|
| 111 |
+
tags.add(Tag(interpreter, abi, platform_))
|
| 112 |
+
return frozenset(tags)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
|
| 116 |
+
value: Union[int, str, None] = sysconfig.get_config_var(name)
|
| 117 |
+
if value is None and warn:
|
| 118 |
+
logger.debug(
|
| 119 |
+
"Config variable '%s' is unset, Python ABI tag may be incorrect", name
|
| 120 |
+
)
|
| 121 |
+
return value
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
def _normalize_string(string: str) -> str:
|
| 125 |
+
return string.replace(".", "_").replace("-", "_").replace(" ", "_")
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def _is_threaded_cpython(abis: List[str]) -> bool:
|
| 129 |
+
"""
|
| 130 |
+
Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
|
| 131 |
+
|
| 132 |
+
The threaded builds are indicated by a "t" in the abiflags.
|
| 133 |
+
"""
|
| 134 |
+
if len(abis) == 0:
|
| 135 |
+
return False
|
| 136 |
+
# expect e.g., cp313
|
| 137 |
+
m = re.match(r"cp\d+(.*)", abis[0])
|
| 138 |
+
if not m:
|
| 139 |
+
return False
|
| 140 |
+
abiflags = m.group(1)
|
| 141 |
+
return "t" in abiflags
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
    """
    Determine if the Python version supports abi3.

    PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
    builds do not support abi3.
    """
    if threading:
        return False
    if len(python_version) < 2:
        # A major-only version cannot be compared against (3, 2).
        return False
    return tuple(python_version) >= (3, 2)
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
    """
    Return the ABI tags for a CPython interpreter of *py_version*.

    The most specific ABI (including any debug/threading/pymalloc/UCS-4
    flags) is first in the returned list; a flag-reduced fallback may follow.
    """
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    # ABI flag suffixes; each stays "" unless the build sets it below.
    threading = debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
        # GIL-disabled ("free-threaded") builds carry a "t" abiflag.
        threading = "t"
    if py_version < (3, 8):
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}{threading}")
    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
    return abis
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
def cpython_tags(
    python_version: Optional[PythonVersion] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    platforms = list(platforms or platform_tags())
    # Most specific tags first: the interpreter-specific ABIs...
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)

    # ...then abi3 (only for non-threaded CPython >= 3.2) and "none".
    threading = _is_threaded_cpython(abis)
    use_abi3 = _abi3_applies(python_version, threading)
    if use_abi3:
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    if use_abi3:
        # Finally, abi3 tags for each older minor version down to 3.2.
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                interpreter = "cp{version}".format(
                    version=_version_nodot((python_version[0], minor_version))
                )
                yield Tag(interpreter, "abi3", platform_)
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
def _generic_abi() -> List[str]:
    """
    Return the ABI tag based on EXT_SUFFIX.

    Returns a single-element list, a multi-element list from
    ``_cpython_abis()`` (old Windows CPython), or ``[]`` when no ABI part
    can be extracted.
    """
    # The following are examples of `EXT_SUFFIX`.
    # We want to keep the parts which are related to the ABI and remove the
    # parts which are related to the platform:
    # - linux:   '.cpython-310-x86_64-linux-gnu.so' => cp310
    # - mac:     '.cpython-310-darwin.so'           => cp310
    # - win:     '.cp310-win_amd64.pyd'             => cp310
    # - win:     '.pyd'                             => cp37 (uses _cpython_abis())
    # - pypy:    '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
    # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
    #            => graalpy_38_native

    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
    parts = ext_suffix.split(".")
    if len(parts) < 3:
        # CPython3.7 and earlier uses ".pyd" on Windows.
        return _cpython_abis(sys.version_info[:2])
    # parts is ["", "<soabi>", ..., "<extension>"]; the SOABI carries the ABI.
    soabi = parts[1]
    if soabi.startswith("cpython"):
        # non-windows
        abi = "cp" + soabi.split("-")[1]
    elif soabi.startswith("cp"):
        # windows
        abi = soabi.split("-")[0]
    elif soabi.startswith("pypy"):
        abi = "-".join(soabi.split("-")[:2])
    elif soabi.startswith("graalpy"):
        abi = "-".join(soabi.split("-")[:3])
    elif soabi:
        # pyston, ironpython, others?
        abi = soabi
    else:
        return []
    return [_normalize_string(abi)]
|
| 286 |
+
|
| 287 |
+
|
| 288 |
+
def generic_tags(
    interpreter: Optional[str] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.
    """
    if not interpreter:
        interp_name = interpreter_name()
        interp_version = interpreter_version(warn=warn)
        interpreter = "".join([interp_name, interp_version])
    if abis is None:
        abis = _generic_abi()
    else:
        # Copy so the caller's iterable is not mutated by the append below.
        abis = list(abis)
    platforms = list(platforms or platform_tags())
    if "none" not in abis:
        abis.append("none")
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yields Python versions in descending order.

    After the latest version, the major-only version will be yielded, and then
    all previous versions of that major version.
    """
    major = py_version[0]
    has_minor = len(py_version) > 1
    if has_minor:
        yield f"py{major}{py_version[1]}"
    yield f"py{major}"
    if has_minor:
        for minor in range(py_version[1] - 1, -1, -1):
            yield f"py{major}{minor}"
|
| 332 |
+
|
| 333 |
+
|
| 334 |
+
def compatible_tags(
    python_version: Optional[PythonVersion] = None,
    interpreter: Optional[str] = None,
    platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platforms = list(platforms or platform_tags())
    # Platform-specific but ABI-independent tags first, newest Python first.
    for version in _py_interpreter_range(python_version):
        for platform_ in platforms:
            yield Tag(version, "none", platform_)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    # Fully generic pure-Python tags last.
    for version in _py_interpreter_range(python_version):
        yield Tag(version, "none", "any")
|
| 357 |
+
|
| 358 |
+
|
| 359 |
+
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    """
    Map the reported macOS CPU architecture for the current interpreter.

    64-bit interpreters keep the reported arch; 32-bit interpreters get
    "ppc" for PowerPC archs and "i386" otherwise.
    """
    if not is_32bit:
        return arch
    return "ppc" if arch.startswith("ppc") else "i386"
|
| 367 |
+
|
| 368 |
+
|
| 369 |
+
def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
    """
    Return the binary-format tags *cpu_arch* supports on macOS *version*,
    most specific first, or [] when the arch/version combination is invalid.
    """
    # Per-arch constraints: (min version, max version, extra fat formats).
    # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
    arch_info = {
        "x86_64": ((10, 4), None, ["intel", "fat64", "fat32"]),
        "i386": ((10, 4), None, ["intel", "fat32", "fat"]),
        "ppc64": ((10, 4), (10, 5), ["fat64"]),
        "ppc": (None, (10, 6), ["fat32", "fat"]),
    }
    formats = [cpu_arch]
    if cpu_arch in arch_info:
        minimum, maximum, extra = arch_info[cpu_arch]
        if minimum is not None and version < minimum:
            return []
        if maximum is not None and version > maximum:
            return []
        formats.extend(extra)

    if cpu_arch in {"arm64", "x86_64"}:
        formats.append("universal2")

    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
        formats.append("universal")

    return formats
|
| 399 |
+
|
| 400 |
+
|
| 401 |
+
def mac_platforms(
    version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
        if version == (10, 16):
            # When built against an older macOS SDK, Python will report macOS 10.16
            # instead of the real version; re-query with SYSTEM_VERSION_COMPAT=0.
            version_str = subprocess.run(
                [
                    sys.executable,
                    "-sS",
                    "-c",
                    "import platform; print(platform.mac_ver()[0])",
                ],
                check=True,
                env={"SYSTEM_VERSION_COMPAT": "0"},
                stdout=subprocess.PIPE,
                text=True,
            ).stdout
            version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    if arch is None:
        arch = _mac_arch(cpu_arch)

    if (10, 0) <= version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number. The major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            for binary_format in _mac_binary_formats(compat_version, arch):
                yield f"macosx_10_{minor_version}_{binary_format}"

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number. The minor versions are now the midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            for binary_format in _mac_binary_formats(compat_version, arch):
                yield f"macosx_{major_version}_0_{binary_format}"

        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
        # that version of macOS.
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                for binary_format in _mac_binary_formats(compat_version, arch):
                    yield (
                        f"macosx_{compat_version[0]}_{compat_version[1]}"
                        f"_{binary_format}"
                    )
        else:
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_format = "universal2"
                yield (
                    f"macosx_{compat_version[0]}_{compat_version[1]}"
                    f"_{binary_format}"
                )
|
| 487 |
+
|
| 488 |
+
|
| 489 |
+
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    """
    Yield the platform tags for a Linux system: manylinux tags, then
    musllinux tags, then the plain ``linux_<arch>`` tag(s).
    """
    linux = _normalize_string(sysconfig.get_platform())
    if not linux.startswith("linux_"):
        # we should never be here, just yield the sysconfig one and return
        yield linux
        return
    if is_32bit:
        # A 32-bit interpreter reports the 32-bit flavour of the arch.
        if linux == "linux_x86_64":
            linux = "linux_i686"
        elif linux == "linux_aarch64":
            linux = "linux_armv8l"
    _, arch = linux.split("_", 1)
    # armv8l is treated as compatible with armv7l wheels as well.
    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
    yield from _manylinux.platform_tags(archs)
    yield from _musllinux.platform_tags(archs)
    for arch in archs:
        yield f"linux_{arch}"
|
| 506 |
+
|
| 507 |
+
|
| 508 |
+
def _generic_platforms() -> Iterator[str]:
    """Yield the single sysconfig-derived platform tag, normalized."""
    platform_tag = sysconfig.get_platform()
    yield _normalize_string(platform_tag)
|
| 510 |
+
|
| 511 |
+
|
| 512 |
+
def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    system = platform.system()
    if system == "Darwin":
        return mac_platforms()
    if system == "Linux":
        return _linux_platforms()
    return _generic_platforms()
|
| 522 |
+
|
| 523 |
+
|
| 524 |
+
def interpreter_name() -> str:
    """
    Returns the name of the running interpreter.

    Some implementations have a reserved, two-letter abbreviation which will
    be returned when appropriate.
    """
    implementation = sys.implementation.name
    short = INTERPRETER_SHORT_NAMES.get(implementation)
    return short or implementation
|
| 533 |
+
|
| 534 |
+
|
| 535 |
+
def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter (no-dot form, e.g. "310").
    """
    configured = _get_config_var("py_version_nodot", warn=warn)
    if configured:
        return str(configured)
    return _version_nodot(sys.version_info[:2])
|
| 545 |
+
|
| 546 |
+
|
| 547 |
+
def _version_nodot(version: PythonVersion) -> str:
    """Join version components without separators, e.g. (3, 10) -> "310"."""
    return "".join(str(component) for component in version)
|
| 549 |
+
|
| 550 |
+
|
| 551 |
+
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """

    # Interpreter-specific tags first (CPython gets its dedicated generator),
    # then the interpreter-independent compatible tags.
    interp_name = interpreter_name()
    if interp_name == "cp":
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    if interp_name == "pp":
        interp = "pp3"
    elif interp_name == "cp":
        interp = "cp" + interpreter_version(warn=warn)
    else:
        interp = None
    yield from compatible_tags(interpreter=interp)
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/utils.py
ADDED
|
@@ -0,0 +1,172 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
import re
|
| 6 |
+
from typing import FrozenSet, NewType, Tuple, Union, cast
|
| 7 |
+
|
| 8 |
+
from .tags import Tag, parse_tag
|
| 9 |
+
from .version import InvalidVersion, Version
|
| 10 |
+
|
| 11 |
+
# A wheel build tag: either empty (no build number) or (number, suffix),
# e.g. "1b" -> (1, "b").
BuildTag = Union[Tuple[()], Tuple[int, str]]
# Marker type for names that have been through canonicalize_name().
NormalizedName = NewType("NormalizedName", str)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class InvalidName(ValueError):
    """
    An invalid distribution name; users should refer to the packaging user guide.

    Raised by :func:`canonicalize_name` when called with ``validate=True``.
    """
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class InvalidWheelFilename(ValueError):
    """
    An invalid wheel filename was found, users should refer to PEP 427.

    Raised by :func:`parse_wheel_filename`.
    """
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class InvalidSdistFilename(ValueError):
    """
    An invalid sdist filename was found, users should refer to the packaging user guide.

    Raised by :func:`parse_sdist_filename`.
    """
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# Core metadata spec for `Name`
_validate_regex = re.compile(
    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)
# PEP 503: runs of "-", "_" and "." are equivalent; collapse to a single "-".
_canonicalize_regex = re.compile(r"[-_.]+")
# Matches names already in normalized (lowercase, single-dash) form.
_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
    """
    Normalize *name* per PEP 503: collapse runs of ``-``/``_``/``.`` into a
    single ``-`` and lowercase the result.

    :param validate: When true, first require *name* to be a valid project
        name per the core metadata spec, raising :class:`InvalidName` otherwise.
    """
    if validate and not _validate_regex.match(name):
        raise InvalidName(f"name is invalid: {name!r}")
    # This is taken from PEP 503.
    normalized = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, normalized)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def is_normalized_name(name: str) -> bool:
    """Report whether *name* is already in PEP 503 normalized form."""
    return bool(_normalized_regex.match(name))
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def canonicalize_version(
    version: Union[Version, str], *, strip_trailing_zero: bool = True
) -> str:
    """
    This is very similar to Version.__str__, but has one subtle difference
    with the way it handles the release segment.

    With ``strip_trailing_zero`` (the default), trailing ``.0`` release
    components are removed. Unparseable (legacy) version strings are
    returned unchanged.
    """
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            # Legacy versions cannot be normalized
            return version
    else:
        parsed = version

    parts = []

    # Epoch
    if parsed.epoch != 0:
        parts.append(f"{parsed.epoch}!")

    # Release segment
    release_segment = ".".join(str(x) for x in parsed.release)
    if strip_trailing_zero:
        # NB: This strips trailing '.0's to normalize
        release_segment = re.sub(r"(\.0)+$", "", release_segment)
    parts.append(release_segment)

    # Pre-release
    if parsed.pre is not None:
        parts.append("".join(str(x) for x in parsed.pre))

    # Post-release
    if parsed.post is not None:
        parts.append(f".post{parsed.post}")

    # Development release
    if parsed.dev is not None:
        parts.append(f".dev{parsed.dev}")

    # Local version segment
    if parsed.local is not None:
        parts.append(f"+{parsed.local}")

    return "".join(parts)
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def parse_wheel_filename(
    filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
    """
    Parse a PEP 427 wheel filename into (name, version, build tag, tags).

    :raises InvalidWheelFilename: If the extension, part count, project
        name, version or build number is invalid.
    """
    # Bug fix: the error messages below contained the literal text
    # "(unknown)" instead of interpolating the offending filename.
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename}"
        )

    filename = filename[:-4]
    dashes = filename.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename}"
        )

    # Name and version may themselves contain escaped dashes; only the last
    # 2 (or 3 with a build tag) dashes separate fields.
    parts = filename.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name.
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename}")
    name = canonicalize_name(name_part)

    try:
        version = Version(parts[1])
    except InvalidVersion as e:
        raise InvalidWheelFilename(
            f"Invalid wheel filename (invalid version): {filename}"
        ) from e

    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in '{filename}'"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()
    tags = parse_tag(parts[-1])
    return (name, version, build, tags)
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
    """
    Parse an sdist filename into its (normalized name, version) pair.

    :raises InvalidSdistFilename: If the extension is not .tar.gz/.zip,
        there is no name-version separator, or the version is invalid.
    """
    # Bug fix: the error messages below contained the literal text
    # "(unknown)" instead of interpolating the offending filename.
    if filename.endswith(".tar.gz"):
        file_stem = filename[: -len(".tar.gz")]
    elif filename.endswith(".zip"):
        file_stem = filename[: -len(".zip")]
    else:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")

    name = canonicalize_name(name_part)

    try:
        version = Version(version_part)
    except InvalidVersion as e:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (invalid version): {filename}"
        ) from e

    return (name, version)
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/packaging/version.py
ADDED
|
@@ -0,0 +1,561 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
"""
|
| 5 |
+
.. testsetup::
|
| 6 |
+
|
| 7 |
+
from packaging.version import parse, Version
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import itertools
|
| 11 |
+
import re
|
| 12 |
+
from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
|
| 13 |
+
|
| 14 |
+
from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
|
| 15 |
+
|
| 16 |
+
__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
|
| 17 |
+
|
| 18 |
+
# A parsed local version segment: mixed ints and strings.
LocalType = Tuple[Union[int, str], ...]

# Sort-key slot for a pre/post/dev segment; Infinity/NegativeInfinity stand
# in for absent segments when building comparison keys.
CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
CmpLocalType = Union[
    NegativeInfinityType,
    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
]
# Full comparison key: epoch, release, three pre/post/dev slots, local.
CmpKey = Tuple[
    int,
    Tuple[int, ...],
    CmpPrePostDevType,
    CmpPrePostDevType,
    CmpPrePostDevType,
    CmpLocalType,
]
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class _Version(NamedTuple):
    """Structured fields of a parsed version string."""

    epoch: int
    release: Tuple[int, ...]
    # dev/pre/post are (letter, number) pairs, or None when absent.
    dev: Optional[Tuple[str, int]]
    pre: Optional[Tuple[str, int]]
    post: Optional[Tuple[str, int]]
    local: Optional[LocalType]
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def parse(version: str) -> "Version":
    """Parse the given version string.

    >>> parse('1.0.dev1')
    <Version('1.0.dev1')>

    :param version: The version string to parse.
    :raises InvalidVersion: When the version string is not a valid version.
    """
    # Thin convenience wrapper around the Version constructor.
    return Version(version)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
# NOTE: inherits from ValueError, matching the other packaging parse errors.
class InvalidVersion(ValueError):
    """Raised when a version string is not a valid version.

    >>> Version("invalid")
    Traceback (most recent call last):
        ...
    packaging.version.InvalidVersion: Invalid version: 'invalid'
    """
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class _BaseVersion:
    """Mixin providing hashing and rich comparisons via a precomputed ``_key``.

    Subclasses must set ``_key`` to a tuple that orders correctly under
    plain tuple comparison.
    """

    _key: Tuple[Any, ...]

    def __hash__(self) -> int:
        return hash(self._key)

    # Please keep the duplicated `isinstance` check
    # in the six comparisons hereunder
    # unless you find a way to avoid adding overhead function calls.
    def __lt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key < other._key

    def __le__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key <= other._key

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key == other._key

    def __ge__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key >= other._key

    def __gt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key > other._key

    def __ne__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key != other._key
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
_VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.

The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.

:meta hide-value:
"""
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
class Version(_BaseVersion):
    """This class abstracts handling of a project's versions.

    A :class:`Version` instance is comparison aware and can be compared and
    sorted using the standard Python interfaces.

    >>> v1 = Version("1.0a5")
    >>> v2 = Version("1.0")
    >>> v1
    <Version('1.0a5')>
    >>> v2
    <Version('1.0')>
    >>> v1 < v2
    True
    >>> v1 == v2
    False
    >>> v1 > v2
    False
    >>> v1 >= v2
    False
    >>> v1 <= v2
    True
    """

    # VERSION_PATTERN is unanchored; anchor it here (allowing surrounding
    # whitespace) so a whole input string must be a version.
    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
    _key: CmpKey

    def __init__(self, version: str) -> None:
        """Initialize a Version object.

        :param version:
            The string representation of a version which will be parsed and normalized
            before use.
        :raises InvalidVersion:
            If the ``version`` does not conform to PEP 440 in any way then this
            exception will be raised.
        """

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion(f"Invalid version: '{version}'")

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            # A post release may use either the implicit "-N" form (post_n1) or
            # the spelled-out "postN"/"revN"/"rN" form (post_n2).
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self) -> str:
        """A representation of the Version that shows all internal state.

        >>> Version('1.0.0')
        <Version('1.0.0')>
        """
        return f"<Version('{self}')>"

    def __str__(self) -> str:
        """A string representation of the version that can be rounded-tripped.

        >>> str(Version("1.0a5"))
        '1.0a5'
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release: letter and number are joined with no separator ("a5").
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(f".post{self.post}")

        # Development release
        if self.dev is not None:
            parts.append(f".dev{self.dev}")

        # Local version segment
        if self.local is not None:
            parts.append(f"+{self.local}")

        return "".join(parts)

    @property
    def epoch(self) -> int:
        """The epoch of the version.

        >>> Version("2.0.0").epoch
        0
        >>> Version("1!2.0.0").epoch
        1
        """
        return self._version.epoch

    @property
    def release(self) -> Tuple[int, ...]:
        """The components of the "release" segment of the version.

        >>> Version("1.2.3").release
        (1, 2, 3)
        >>> Version("2.0.0").release
        (2, 0, 0)
        >>> Version("1!2.0.0.post0").release
        (2, 0, 0)

        Includes trailing zeroes but not the epoch or any pre-release / development /
        post-release suffixes.
        """
        return self._version.release

    @property
    def pre(self) -> Optional[Tuple[str, int]]:
        """The pre-release segment of the version.

        >>> print(Version("1.2.3").pre)
        None
        >>> Version("1.2.3a1").pre
        ('a', 1)
        >>> Version("1.2.3b1").pre
        ('b', 1)
        >>> Version("1.2.3rc1").pre
        ('rc', 1)
        """
        return self._version.pre

    @property
    def post(self) -> Optional[int]:
        """The post-release number of the version.

        >>> print(Version("1.2.3").post)
        None
        >>> Version("1.2.3.post1").post
        1
        """
        # Stored as ("post", N); expose just the number.
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self) -> Optional[int]:
        """The development number of the version.

        >>> print(Version("1.2.3").dev)
        None
        >>> Version("1.2.3.dev1").dev
        1
        """
        # Stored as ("dev", N); expose just the number.
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self) -> Optional[str]:
        """The local version segment of the version.

        >>> print(Version("1.2.3").local)
        None
        >>> Version("1.2.3+abc").local
        'abc'
        """
        if self._version.local:
            # Re-join the normalized segments with the canonical "." separator.
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self) -> str:
        """The public portion of the version.

        >>> Version("1.2.3").public
        '1.2.3'
        >>> Version("1.2.3+abc").public
        '1.2.3'
        >>> Version("1.2.3+abc.dev1").public
        '1.2.3'
        """
        return str(self).split("+", 1)[0]

    @property
    def base_version(self) -> str:
        """The "base version" of the version.

        >>> Version("1.2.3").base_version
        '1.2.3'
        >>> Version("1.2.3+abc").base_version
        '1.2.3'
        >>> Version("1!1.2.3+abc.dev1").base_version
        '1!1.2.3'

        The "base version" is the public version of the project without any pre or post
        release markers.
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self) -> bool:
        """Whether this version is a pre-release.

        >>> Version("1.2.3").is_prerelease
        False
        >>> Version("1.2.3a1").is_prerelease
        True
        >>> Version("1.2.3b1").is_prerelease
        True
        >>> Version("1.2.3rc1").is_prerelease
        True
        >>> Version("1.2.3dev1").is_prerelease
        True
        """
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self) -> bool:
        """Whether this version is a post-release.

        >>> Version("1.2.3").is_postrelease
        False
        >>> Version("1.2.3.post1").is_postrelease
        True
        """
        return self.post is not None

    @property
    def is_devrelease(self) -> bool:
        """Whether this version is a development release.

        >>> Version("1.2.3").is_devrelease
        False
        >>> Version("1.2.3.dev1").is_devrelease
        True
        """
        return self.dev is not None

    @property
    def major(self) -> int:
        """The first item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").major
        1
        """
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self) -> int:
        """The second item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").minor
        2
        >>> Version("1").minor
        0
        """
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self) -> int:
        """The third item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").micro
        3
        >>> Version("1").micro
        0
        """
        return self.release[2] if len(self.release) >= 3 else 0
|
| 450 |
+
|
| 451 |
+
|
| 452 |
+
def _parse_letter_version(
|
| 453 |
+
letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
|
| 454 |
+
) -> Optional[Tuple[str, int]]:
|
| 455 |
+
if letter:
|
| 456 |
+
# We consider there to be an implicit 0 in a pre-release if there is
|
| 457 |
+
# not a numeral associated with it.
|
| 458 |
+
if number is None:
|
| 459 |
+
number = 0
|
| 460 |
+
|
| 461 |
+
# We normalize any letters to their lower case form
|
| 462 |
+
letter = letter.lower()
|
| 463 |
+
|
| 464 |
+
# We consider some words to be alternate spellings of other words and
|
| 465 |
+
# in those cases we want to normalize the spellings to our preferred
|
| 466 |
+
# spelling.
|
| 467 |
+
if letter == "alpha":
|
| 468 |
+
letter = "a"
|
| 469 |
+
elif letter == "beta":
|
| 470 |
+
letter = "b"
|
| 471 |
+
elif letter in ["c", "pre", "preview"]:
|
| 472 |
+
letter = "rc"
|
| 473 |
+
elif letter in ["rev", "r"]:
|
| 474 |
+
letter = "post"
|
| 475 |
+
|
| 476 |
+
return letter, int(number)
|
| 477 |
+
if not letter and number:
|
| 478 |
+
# We assume if we are given a number, but we are not given a letter
|
| 479 |
+
# then this is using the implicit post release syntax (e.g. 1.0-1)
|
| 480 |
+
letter = "post"
|
| 481 |
+
|
| 482 |
+
return letter, int(number)
|
| 483 |
+
|
| 484 |
+
return None
|
| 485 |
+
|
| 486 |
+
|
| 487 |
+
# Separators PEP 440 allows between local version segments.
_local_version_separators = re.compile(r"[\._-]")


def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is None:
        return None
    segments = _local_version_separators.split(local)
    # Numeric segments become ints (so they compare numerically);
    # everything else is lower-cased for case-insensitive comparison.
    return tuple(int(seg) if seg.isdigit() else seg.lower() for seg in segments)
|
| 500 |
+
|
| 501 |
+
|
| 502 |
+
def _cmpkey(
    epoch: int,
    release: Tuple[int, ...],
    pre: Optional[Tuple[str, int]],
    post: Optional[Tuple[str, int]],
    dev: Optional[Tuple[str, int]],
    local: Optional[LocalType],
) -> CmpKey:
    """Build the tuple sort key that implements PEP 440 ordering.

    Missing segments are replaced by the Infinity/NegativeInfinity sentinels
    so that plain tuple comparison yields the correct ordering.
    """
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre: CmpPrePostDevType = NegativeInfinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post: CmpPrePostDevType = NegativeInfinity

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev: CmpPrePostDevType = Infinity

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local: CmpLocalType = NegativeInfinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/vendored/vendor.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
packaging==24.0
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/_vendor/wheel/wheelfile.py
ADDED
|
@@ -0,0 +1,196 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import csv
|
| 4 |
+
import hashlib
|
| 5 |
+
import os.path
|
| 6 |
+
import re
|
| 7 |
+
import stat
|
| 8 |
+
import time
|
| 9 |
+
from io import StringIO, TextIOWrapper
|
| 10 |
+
from zipfile import ZIP_DEFLATED, ZipFile, ZipInfo
|
| 11 |
+
|
| 12 |
+
from wheel.cli import WheelError
|
| 13 |
+
from wheel.util import log, urlsafe_b64decode, urlsafe_b64encode
|
| 14 |
+
|
| 15 |
+
# Non-greedy matching of an optional build number may be too clever (more
# invalid wheel filenames will match). Separate regex for .dist-info?
WHEEL_INFO_RE = re.compile(
    r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]+?))(-(?P<build>\d[^\s-]*))?
    -(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>\S+)\.whl$""",
    re.VERBOSE,
)
MINIMUM_TIMESTAMP = 315532800  # 1980-01-01 00:00:00 UTC


def get_zipinfo_datetime(timestamp=None):
    """Return a (Y, M, D, h, m, s) tuple for ``ZipInfo.date_time``.

    ``SOURCE_DATE_EPOCH`` overrides *timestamp* so that reproducible wheel
    builds are possible (see issue #143).  The result is clamped to
    ``MINIMUM_TIMESTAMP`` because the ZIP format cannot represent dates
    before 1980-01-01.
    """
    override = os.environ.get("SOURCE_DATE_EPOCH")
    if override is not None:
        seconds = int(override)
    else:
        seconds = int(timestamp or time.time())
    if seconds < MINIMUM_TIMESTAMP:
        seconds = MINIMUM_TIMESTAMP
    return time.gmtime(seconds)[0:6]
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class WheelFile(ZipFile):
    """A ZipFile derivative class that also reads SHA-256 hashes from
    .dist-info/RECORD and checks any read files against those.
    """

    # Hash used for entries written via write()/writestr(); RECORD rows use it.
    _default_algorithm = hashlib.sha256

    def __init__(self, file, mode="r", compression=ZIP_DEFLATED):
        """Open *file* as a wheel.

        :param file: path to a ``*.whl`` file; the filename must match the
            wheel naming convention (parsed via ``WHEEL_INFO_RE``).
        :raises WheelError: on a bad filename, or (in read mode) a missing
            RECORD, an unsupported hash algorithm, or a weak (md5/sha1) hash.
        """
        basename = os.path.basename(file)
        self.parsed_filename = WHEEL_INFO_RE.match(basename)
        if not basename.endswith(".whl") or self.parsed_filename is None:
            raise WheelError(f"Bad wheel filename {basename!r}")

        ZipFile.__init__(self, file, mode, compression=compression, allowZip64=True)

        self.dist_info_path = "{}.dist-info".format(
            self.parsed_filename.group("namever")
        )
        self.record_path = self.dist_info_path + "/RECORD"
        # arcname -> (algorithm, digest) expected for each member.
        self._file_hashes = {}
        self._file_sizes = {}
        if mode == "r":
            # Ignore RECORD and any embedded wheel signatures
            self._file_hashes[self.record_path] = None, None
            self._file_hashes[self.record_path + ".jws"] = None, None
            self._file_hashes[self.record_path + ".p7s"] = None, None

            # Fill in the expected hashes by reading them from RECORD
            try:
                record = self.open(self.record_path)
            except KeyError:
                raise WheelError(f"Missing {self.record_path} file") from None

            with record:
                for line in csv.reader(
                    TextIOWrapper(record, newline="", encoding="utf-8")
                ):
                    path, hash_sum, size = line
                    if not hash_sum:
                        continue

                    # RECORD stores hashes as "<algorithm>=<urlsafe-b64 digest>".
                    algorithm, hash_sum = hash_sum.split("=")
                    try:
                        hashlib.new(algorithm)
                    except ValueError:
                        raise WheelError(
                            f"Unsupported hash algorithm: {algorithm}"
                        ) from None

                    if algorithm.lower() in {"md5", "sha1"}:
                        raise WheelError(
                            f"Weak hash algorithm ({algorithm}) is not permitted by "
                            f"PEP 427"
                        )

                    self._file_hashes[path] = (
                        algorithm,
                        urlsafe_b64decode(hash_sum.encode("ascii")),
                    )

    def open(self, name_or_info, mode="r", pwd=None):
        """Open an archive member; in read mode, verify its RECORD hash
        incrementally as the data is consumed."""

        def _update_crc(newdata):
            # Wraps the ZipExtFile CRC updater so the RECORD hash is checked
            # once the member has been fully read.
            eof = ef._eof
            update_crc_orig(newdata)
            running_hash.update(newdata)
            if eof and running_hash.digest() != expected_hash:
                raise WheelError(f"Hash mismatch for file '{ef_name}'")

        ef_name = (
            name_or_info.filename if isinstance(name_or_info, ZipInfo) else name_or_info
        )
        if (
            mode == "r"
            and not ef_name.endswith("/")
            and ef_name not in self._file_hashes
        ):
            raise WheelError(f"No hash found for file '{ef_name}'")

        ef = ZipFile.open(self, name_or_info, mode, pwd)
        if mode == "r" and not ef_name.endswith("/"):
            algorithm, expected_hash = self._file_hashes[ef_name]
            if expected_hash is not None:
                # Monkey patch the _update_crc method to also check for the hash from
                # RECORD
                running_hash = hashlib.new(algorithm)
                update_crc_orig, ef._update_crc = ef._update_crc, _update_crc

        return ef

    def write_files(self, base_dir):
        """Recursively add the contents of *base_dir*, writing .dist-info
        entries last (and RECORD not at all — close() writes it)."""
        log.info(f"creating '{self.filename}' and adding '{base_dir}' to it")
        deferred = []
        for root, dirnames, filenames in os.walk(base_dir):
            # Sort the directory names so that `os.walk` will walk them in a
            # defined order on the next iteration.
            dirnames.sort()
            for name in sorted(filenames):
                path = os.path.normpath(os.path.join(root, name))
                if os.path.isfile(path):
                    arcname = os.path.relpath(path, base_dir).replace(os.path.sep, "/")
                    if arcname == self.record_path:
                        pass
                    elif root.endswith(".dist-info"):
                        deferred.append((path, arcname))
                    else:
                        self.write(path, arcname)

        deferred.sort()
        for path, arcname in deferred:
            self.write(path, arcname)

    def write(self, filename, arcname=None, compress_type=None):
        """Add the file *filename* to the archive, preserving its mode bits
        and (reproducibly clamped) mtime."""
        with open(filename, "rb") as f:
            st = os.fstat(f.fileno())
            data = f.read()

        zinfo = ZipInfo(
            arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime)
        )
        # Store the POSIX permission + file-type bits in the zip entry.
        zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
        zinfo.compress_type = compress_type or self.compression
        self.writestr(zinfo, data, compress_type)

    def writestr(self, zinfo_or_arcname, data, compress_type=None):
        """Write *data* under the given name/ZipInfo and remember its hash and
        size for the RECORD written at close()."""
        if isinstance(zinfo_or_arcname, str):
            zinfo_or_arcname = ZipInfo(
                zinfo_or_arcname, date_time=get_zipinfo_datetime()
            )
            zinfo_or_arcname.compress_type = self.compression
            # Regular file, rw-rw-r-- permissions.
            zinfo_or_arcname.external_attr = (0o664 | stat.S_IFREG) << 16

        if isinstance(data, str):
            data = data.encode("utf-8")

        ZipFile.writestr(self, zinfo_or_arcname, data, compress_type)
        fname = (
            zinfo_or_arcname.filename
            if isinstance(zinfo_or_arcname, ZipInfo)
            else zinfo_or_arcname
        )
        log.info(f"adding '{fname}'")
        if fname != self.record_path:
            hash_ = self._default_algorithm(data)
            self._file_hashes[fname] = (
                hash_.name,
                urlsafe_b64encode(hash_.digest()).decode("ascii"),
            )
            self._file_sizes[fname] = len(data)

    def close(self):
        # Write RECORD
        if self.fp is not None and self.mode == "w" and self._file_hashes:
            data = StringIO()
            writer = csv.writer(data, delimiter=",", quotechar='"', lineterminator="\n")
            writer.writerows(
                (
                    (fname, algorithm + "=" + hash_, self._file_sizes[fname])
                    for fname, (algorithm, hash_) in self._file_hashes.items()
                )
            )
            # RECORD's own row carries no hash or size, per the wheel spec.
            writer.writerow((format(self.record_path), "", ""))
            self.writestr(self.record_path, data.getvalue())

        ZipFile.close(self)
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/fixtures.py
ADDED
|
@@ -0,0 +1,157 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import os
|
| 3 |
+
import subprocess
|
| 4 |
+
import sys
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
|
| 7 |
+
import path
|
| 8 |
+
import pytest
|
| 9 |
+
|
| 10 |
+
from . import contexts, environment
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
@pytest.fixture
def user_override(monkeypatch):
    """
    Override site.USER_BASE and site.USER_SITE with temporary directories in
    a context.
    """
    with contexts.tempdir() as user_base:
        monkeypatch.setattr('site.USER_BASE', user_base)
        with contexts.tempdir() as user_site:
            monkeypatch.setattr('site.USER_SITE', user_site)
            with contexts.save_user_site_setting():
                yield


@pytest.fixture
def tmpdir_cwd(tmpdir):
    """Run the test with ``tmpdir`` as the cwd; yields the original cwd."""
    with tmpdir.as_cwd() as orig:
        yield orig
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@pytest.fixture(autouse=True, scope="session")
def workaround_xdist_376(request):
    """
    Workaround pytest-dev/pytest-xdist#376

    ``pytest-xdist`` tends to inject '' into ``sys.path``,
    which may break certain isolation expectations.
    Remove the entry so the import
    machinery behaves the same irrespective of xdist.
    """
    if not request.config.pluginmanager.has_plugin('xdist'):
        return

    # '' may legitimately be absent; suppress the ValueError from remove().
    with contextlib.suppress(ValueError):
        sys.path.remove('')
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
@pytest.fixture
def sample_project(tmp_path):
    """
    Clone the 'sampleproject' and return a path to it.
    """
    cmd = ['git', 'clone', 'https://github.com/pypa/sampleproject']
    try:
        subprocess.check_call(cmd, cwd=str(tmp_path))
    except Exception:
        # Broad on purpose: any failure (no git, no network, ...) just skips.
        pytest.skip("Unable to clone sampleproject")
    return tmp_path / 'sampleproject'
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
# sdist and wheel artifacts should be stable across a round of tests
|
| 64 |
+
# so we can build them once per session and use the files as "readonly"
|
| 65 |
+
|
| 66 |
+
# In the case of setuptools, building the wheel without sdist may cause
|
| 67 |
+
# it to contain the `build` directory, and therefore create situations with
|
| 68 |
+
# `setuptools/build/lib/build/lib/...`. To avoid that, build both artifacts at once.
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def _build_distributions(tmp_path_factory, request):
    """Build (or reuse) the project's sdist and wheel once per session.

    Returns a ``(sdist_path, wheel_path)`` tuple.  The build directory is
    locked so parallel (xdist) workers share one build.
    """
    with contexts.session_locked_tmp_dir(
        request, tmp_path_factory, "dist_build"
    ) as tmp:  # pragma: no cover
        sdist = next(tmp.glob("*.tar.gz"), None)
        wheel = next(tmp.glob("*.whl"), None)
        if sdist and wheel:
            # Another worker (or an earlier test) already built both artifacts.
            return (sdist, wheel)

        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
        assert not Path(request.config.rootdir, "build/lib/build").exists()

        subprocess.check_output([
            sys.executable,
            "-m",
            "build",
            "--outdir",
            str(tmp),
            str(request.config.rootdir),
        ])

        # Sanity check: should not create recursive setuptools/build/lib/build/lib/...
        assert not Path(request.config.rootdir, "build/lib/build").exists()

        return next(tmp.glob("*.tar.gz")), next(tmp.glob("*.whl"))
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
@pytest.fixture(scope="session")
def setuptools_sdist(tmp_path_factory, request):
    """Path to the setuptools sdist under test (prebuilt or built once)."""
    # Allow CI/downstream packaging to supply a prebuilt artifact.
    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_SDIST")
    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
        return Path(prebuilt).resolve()

    sdist, _ = _build_distributions(tmp_path_factory, request)
    return sdist


@pytest.fixture(scope="session")
def setuptools_wheel(tmp_path_factory, request):
    """Path to the setuptools wheel under test (prebuilt or built once)."""
    # Allow CI/downstream packaging to supply a prebuilt artifact.
    prebuilt = os.getenv("PRE_BUILT_SETUPTOOLS_WHEEL")
    if prebuilt and os.path.exists(prebuilt):  # pragma: no cover
        return Path(prebuilt).resolve()

    _, wheel = _build_distributions(tmp_path_factory, request)
    return wheel
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
@pytest.fixture
def venv(tmp_path, setuptools_wheel):
    """Virtual env with the version of setuptools under test installed"""
    env = environment.VirtualEnv()
    env.root = path.Path(tmp_path / 'venv')
    env.create_opts = ['--no-setuptools', '--wheel=bundle']
    # TODO: Use `--no-wheel` when setuptools implements its own bdist_wheel
    env.req = str(setuptools_wheel)
    # In some environments (eg. downstream distro packaging),
    # where tox isn't used to run tests and PYTHONPATH is set to point to
    # a specific setuptools codebase, PYTHONPATH will leak into the spawned
    # processes.
    # env.create() should install the just created setuptools
    # wheel, but it doesn't if it finds another existing matching setuptools
    # installation present on PYTHONPATH:
    # `setuptools is already installed with the same version as the provided
    # wheel. Use --force-reinstall to force an installation of the wheel.`
    # This prevents leaking PYTHONPATH to the created environment.
    with contexts.environment(PYTHONPATH=None):
        return env.create()
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
@pytest.fixture
|
| 141 |
+
def venv_without_setuptools(tmp_path):
|
| 142 |
+
"""Virtual env without any version of setuptools installed"""
|
| 143 |
+
env = environment.VirtualEnv()
|
| 144 |
+
env.root = path.Path(tmp_path / 'venv_without_setuptools')
|
| 145 |
+
env.create_opts = ['--no-setuptools', '--no-wheel']
|
| 146 |
+
env.ensure_env()
|
| 147 |
+
return env
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
@pytest.fixture
|
| 151 |
+
def bare_venv(tmp_path):
|
| 152 |
+
"""Virtual env without any common packages installed"""
|
| 153 |
+
env = environment.VirtualEnv()
|
| 154 |
+
env.root = path.Path(tmp_path / 'bare_venv')
|
| 155 |
+
env.create_opts = ['--no-setuptools', '--no-pip', '--no-wheel', '--no-seed']
|
| 156 |
+
env.ensure_env()
|
| 157 |
+
return env
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/script-with-bom.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
result = 'passed'
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_bdist_egg.py
ADDED
|
@@ -0,0 +1,73 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""develop tests"""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
import zipfile
|
| 6 |
+
|
| 7 |
+
import pytest
|
| 8 |
+
|
| 9 |
+
from setuptools.dist import Distribution
|
| 10 |
+
|
| 11 |
+
from . import contexts
|
| 12 |
+
|
| 13 |
+
SETUP_PY = """\
|
| 14 |
+
from setuptools import setup
|
| 15 |
+
|
| 16 |
+
setup(py_modules=['hi'])
|
| 17 |
+
"""
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
@pytest.fixture
|
| 21 |
+
def setup_context(tmpdir):
|
| 22 |
+
with (tmpdir / 'setup.py').open('w') as f:
|
| 23 |
+
f.write(SETUP_PY)
|
| 24 |
+
with (tmpdir / 'hi.py').open('w') as f:
|
| 25 |
+
f.write('1\n')
|
| 26 |
+
with tmpdir.as_cwd():
|
| 27 |
+
yield tmpdir
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class Test:
|
| 31 |
+
@pytest.mark.usefixtures("user_override")
|
| 32 |
+
@pytest.mark.usefixtures("setup_context")
|
| 33 |
+
def test_bdist_egg(self):
|
| 34 |
+
dist = Distribution(
|
| 35 |
+
dict(
|
| 36 |
+
script_name='setup.py',
|
| 37 |
+
script_args=['bdist_egg'],
|
| 38 |
+
name='foo',
|
| 39 |
+
py_modules=['hi'],
|
| 40 |
+
)
|
| 41 |
+
)
|
| 42 |
+
os.makedirs(os.path.join('build', 'src'))
|
| 43 |
+
with contexts.quiet():
|
| 44 |
+
dist.parse_command_line()
|
| 45 |
+
dist.run_commands()
|
| 46 |
+
|
| 47 |
+
# let's see if we got our egg link at the right place
|
| 48 |
+
[content] = os.listdir('dist')
|
| 49 |
+
assert re.match(r'foo-0.0.0-py[23].\d+.egg$', content)
|
| 50 |
+
|
| 51 |
+
@pytest.mark.xfail(
|
| 52 |
+
os.environ.get('PYTHONDONTWRITEBYTECODE', False),
|
| 53 |
+
reason="Byte code disabled",
|
| 54 |
+
)
|
| 55 |
+
@pytest.mark.usefixtures("user_override")
|
| 56 |
+
@pytest.mark.usefixtures("setup_context")
|
| 57 |
+
def test_exclude_source_files(self):
|
| 58 |
+
dist = Distribution(
|
| 59 |
+
dict(
|
| 60 |
+
script_name='setup.py',
|
| 61 |
+
script_args=['bdist_egg', '--exclude-source-files'],
|
| 62 |
+
py_modules=['hi'],
|
| 63 |
+
)
|
| 64 |
+
)
|
| 65 |
+
with contexts.quiet():
|
| 66 |
+
dist.parse_command_line()
|
| 67 |
+
dist.run_commands()
|
| 68 |
+
[dist_name] = os.listdir('dist')
|
| 69 |
+
dist_filename = os.path.join('dist', dist_name)
|
| 70 |
+
zip = zipfile.ZipFile(dist_filename)
|
| 71 |
+
names = list(zi.filename for zi in zip.filelist)
|
| 72 |
+
assert 'hi.pyc' in names
|
| 73 |
+
assert 'hi.py' not in names
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_build_meta.py
ADDED
|
@@ -0,0 +1,970 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import importlib
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
import shutil
|
| 6 |
+
import signal
|
| 7 |
+
import sys
|
| 8 |
+
import tarfile
|
| 9 |
+
from concurrent import futures
|
| 10 |
+
from pathlib import Path
|
| 11 |
+
from typing import Any, Callable
|
| 12 |
+
from zipfile import ZipFile
|
| 13 |
+
|
| 14 |
+
import pytest
|
| 15 |
+
from jaraco import path
|
| 16 |
+
from packaging.requirements import Requirement
|
| 17 |
+
|
| 18 |
+
from .textwrap import DALS
|
| 19 |
+
|
| 20 |
+
SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
TIMEOUT = int(os.getenv("TIMEOUT_BACKEND_TEST", "180")) # in seconds
|
| 24 |
+
IS_PYPY = '__pypy__' in sys.builtin_module_names
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
pytestmark = pytest.mark.skipif(
|
| 28 |
+
sys.platform == "win32" and IS_PYPY,
|
| 29 |
+
reason="The combination of PyPy + Windows + pytest-xdist + ProcessPoolExecutor "
|
| 30 |
+
"is flaky and problematic",
|
| 31 |
+
)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class BuildBackendBase:
|
| 35 |
+
def __init__(self, cwd='.', env=None, backend_name='setuptools.build_meta'):
|
| 36 |
+
self.cwd = cwd
|
| 37 |
+
self.env = env or {}
|
| 38 |
+
self.backend_name = backend_name
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class BuildBackend(BuildBackendBase):
|
| 42 |
+
"""PEP 517 Build Backend"""
|
| 43 |
+
|
| 44 |
+
def __init__(self, *args, **kwargs):
|
| 45 |
+
super().__init__(*args, **kwargs)
|
| 46 |
+
self.pool = futures.ProcessPoolExecutor(max_workers=1)
|
| 47 |
+
|
| 48 |
+
def __getattr__(self, name: str) -> Callable[..., Any]:
|
| 49 |
+
"""Handles arbitrary function invocations on the build backend."""
|
| 50 |
+
|
| 51 |
+
def method(*args, **kw):
|
| 52 |
+
root = os.path.abspath(self.cwd)
|
| 53 |
+
caller = BuildBackendCaller(root, self.env, self.backend_name)
|
| 54 |
+
pid = None
|
| 55 |
+
try:
|
| 56 |
+
pid = self.pool.submit(os.getpid).result(TIMEOUT)
|
| 57 |
+
return self.pool.submit(caller, name, *args, **kw).result(TIMEOUT)
|
| 58 |
+
except futures.TimeoutError:
|
| 59 |
+
self.pool.shutdown(wait=False) # doesn't stop already running processes
|
| 60 |
+
self._kill(pid)
|
| 61 |
+
pytest.xfail(f"Backend did not respond before timeout ({TIMEOUT} s)")
|
| 62 |
+
except (futures.process.BrokenProcessPool, MemoryError, OSError):
|
| 63 |
+
if IS_PYPY:
|
| 64 |
+
pytest.xfail("PyPy frequently fails tests with ProcessPoolExector")
|
| 65 |
+
raise
|
| 66 |
+
|
| 67 |
+
return method
|
| 68 |
+
|
| 69 |
+
def _kill(self, pid):
|
| 70 |
+
if pid is None:
|
| 71 |
+
return
|
| 72 |
+
with contextlib.suppress(ProcessLookupError, OSError):
|
| 73 |
+
os.kill(pid, signal.SIGTERM if os.name == "nt" else signal.SIGKILL)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class BuildBackendCaller(BuildBackendBase):
|
| 77 |
+
def __init__(self, *args, **kwargs):
|
| 78 |
+
super().__init__(*args, **kwargs)
|
| 79 |
+
|
| 80 |
+
(self.backend_name, _, self.backend_obj) = self.backend_name.partition(':')
|
| 81 |
+
|
| 82 |
+
def __call__(self, name, *args, **kw):
|
| 83 |
+
"""Handles arbitrary function invocations on the build backend."""
|
| 84 |
+
os.chdir(self.cwd)
|
| 85 |
+
os.environ.update(self.env)
|
| 86 |
+
mod = importlib.import_module(self.backend_name)
|
| 87 |
+
|
| 88 |
+
if self.backend_obj:
|
| 89 |
+
backend = getattr(mod, self.backend_obj)
|
| 90 |
+
else:
|
| 91 |
+
backend = mod
|
| 92 |
+
|
| 93 |
+
return getattr(backend, name)(*args, **kw)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
defns = [
|
| 97 |
+
{ # simple setup.py script
|
| 98 |
+
'setup.py': DALS(
|
| 99 |
+
"""
|
| 100 |
+
__import__('setuptools').setup(
|
| 101 |
+
name='foo',
|
| 102 |
+
version='0.0.0',
|
| 103 |
+
py_modules=['hello'],
|
| 104 |
+
setup_requires=['six'],
|
| 105 |
+
)
|
| 106 |
+
"""
|
| 107 |
+
),
|
| 108 |
+
'hello.py': DALS(
|
| 109 |
+
"""
|
| 110 |
+
def run():
|
| 111 |
+
print('hello')
|
| 112 |
+
"""
|
| 113 |
+
),
|
| 114 |
+
},
|
| 115 |
+
{ # setup.py that relies on __name__
|
| 116 |
+
'setup.py': DALS(
|
| 117 |
+
"""
|
| 118 |
+
assert __name__ == '__main__'
|
| 119 |
+
__import__('setuptools').setup(
|
| 120 |
+
name='foo',
|
| 121 |
+
version='0.0.0',
|
| 122 |
+
py_modules=['hello'],
|
| 123 |
+
setup_requires=['six'],
|
| 124 |
+
)
|
| 125 |
+
"""
|
| 126 |
+
),
|
| 127 |
+
'hello.py': DALS(
|
| 128 |
+
"""
|
| 129 |
+
def run():
|
| 130 |
+
print('hello')
|
| 131 |
+
"""
|
| 132 |
+
),
|
| 133 |
+
},
|
| 134 |
+
{ # setup.py script that runs arbitrary code
|
| 135 |
+
'setup.py': DALS(
|
| 136 |
+
"""
|
| 137 |
+
variable = True
|
| 138 |
+
def function():
|
| 139 |
+
return variable
|
| 140 |
+
assert variable
|
| 141 |
+
__import__('setuptools').setup(
|
| 142 |
+
name='foo',
|
| 143 |
+
version='0.0.0',
|
| 144 |
+
py_modules=['hello'],
|
| 145 |
+
setup_requires=['six'],
|
| 146 |
+
)
|
| 147 |
+
"""
|
| 148 |
+
),
|
| 149 |
+
'hello.py': DALS(
|
| 150 |
+
"""
|
| 151 |
+
def run():
|
| 152 |
+
print('hello')
|
| 153 |
+
"""
|
| 154 |
+
),
|
| 155 |
+
},
|
| 156 |
+
{ # setup.py script that constructs temp files to be included in the distribution
|
| 157 |
+
'setup.py': DALS(
|
| 158 |
+
"""
|
| 159 |
+
# Some packages construct files on the fly, include them in the package,
|
| 160 |
+
# and immediately remove them after `setup()` (e.g. pybind11==2.9.1).
|
| 161 |
+
# Therefore, we cannot use `distutils.core.run_setup(..., stop_after=...)`
|
| 162 |
+
# to obtain a distribution object first, and then run the distutils
|
| 163 |
+
# commands later, because these files will be removed in the meantime.
|
| 164 |
+
|
| 165 |
+
with open('world.py', 'w', encoding="utf-8") as f:
|
| 166 |
+
f.write('x = 42')
|
| 167 |
+
|
| 168 |
+
try:
|
| 169 |
+
__import__('setuptools').setup(
|
| 170 |
+
name='foo',
|
| 171 |
+
version='0.0.0',
|
| 172 |
+
py_modules=['world'],
|
| 173 |
+
setup_requires=['six'],
|
| 174 |
+
)
|
| 175 |
+
finally:
|
| 176 |
+
# Some packages will clean temporary files
|
| 177 |
+
__import__('os').unlink('world.py')
|
| 178 |
+
"""
|
| 179 |
+
),
|
| 180 |
+
},
|
| 181 |
+
{ # setup.cfg only
|
| 182 |
+
'setup.cfg': DALS(
|
| 183 |
+
"""
|
| 184 |
+
[metadata]
|
| 185 |
+
name = foo
|
| 186 |
+
version = 0.0.0
|
| 187 |
+
|
| 188 |
+
[options]
|
| 189 |
+
py_modules=hello
|
| 190 |
+
setup_requires=six
|
| 191 |
+
"""
|
| 192 |
+
),
|
| 193 |
+
'hello.py': DALS(
|
| 194 |
+
"""
|
| 195 |
+
def run():
|
| 196 |
+
print('hello')
|
| 197 |
+
"""
|
| 198 |
+
),
|
| 199 |
+
},
|
| 200 |
+
{ # setup.cfg and setup.py
|
| 201 |
+
'setup.cfg': DALS(
|
| 202 |
+
"""
|
| 203 |
+
[metadata]
|
| 204 |
+
name = foo
|
| 205 |
+
version = 0.0.0
|
| 206 |
+
|
| 207 |
+
[options]
|
| 208 |
+
py_modules=hello
|
| 209 |
+
setup_requires=six
|
| 210 |
+
"""
|
| 211 |
+
),
|
| 212 |
+
'setup.py': "__import__('setuptools').setup()",
|
| 213 |
+
'hello.py': DALS(
|
| 214 |
+
"""
|
| 215 |
+
def run():
|
| 216 |
+
print('hello')
|
| 217 |
+
"""
|
| 218 |
+
),
|
| 219 |
+
},
|
| 220 |
+
]
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
class TestBuildMetaBackend:
|
| 224 |
+
backend_name = 'setuptools.build_meta'
|
| 225 |
+
|
| 226 |
+
def get_build_backend(self):
|
| 227 |
+
return BuildBackend(backend_name=self.backend_name)
|
| 228 |
+
|
| 229 |
+
@pytest.fixture(params=defns)
|
| 230 |
+
def build_backend(self, tmpdir, request):
|
| 231 |
+
path.build(request.param, prefix=str(tmpdir))
|
| 232 |
+
with tmpdir.as_cwd():
|
| 233 |
+
yield self.get_build_backend()
|
| 234 |
+
|
| 235 |
+
def test_get_requires_for_build_wheel(self, build_backend):
|
| 236 |
+
actual = build_backend.get_requires_for_build_wheel()
|
| 237 |
+
expected = ['six']
|
| 238 |
+
assert sorted(actual) == sorted(expected)
|
| 239 |
+
|
| 240 |
+
def test_get_requires_for_build_sdist(self, build_backend):
|
| 241 |
+
actual = build_backend.get_requires_for_build_sdist()
|
| 242 |
+
expected = ['six']
|
| 243 |
+
assert sorted(actual) == sorted(expected)
|
| 244 |
+
|
| 245 |
+
def test_build_wheel(self, build_backend):
|
| 246 |
+
dist_dir = os.path.abspath('pip-wheel')
|
| 247 |
+
os.makedirs(dist_dir)
|
| 248 |
+
wheel_name = build_backend.build_wheel(dist_dir)
|
| 249 |
+
|
| 250 |
+
wheel_file = os.path.join(dist_dir, wheel_name)
|
| 251 |
+
assert os.path.isfile(wheel_file)
|
| 252 |
+
|
| 253 |
+
# Temporary files should be removed
|
| 254 |
+
assert not os.path.isfile('world.py')
|
| 255 |
+
|
| 256 |
+
with ZipFile(wheel_file) as zipfile:
|
| 257 |
+
wheel_contents = set(zipfile.namelist())
|
| 258 |
+
|
| 259 |
+
# Each one of the examples have a single module
|
| 260 |
+
# that should be included in the distribution
|
| 261 |
+
python_scripts = (f for f in wheel_contents if f.endswith('.py'))
|
| 262 |
+
modules = [f for f in python_scripts if not f.endswith('setup.py')]
|
| 263 |
+
assert len(modules) == 1
|
| 264 |
+
|
| 265 |
+
@pytest.mark.parametrize('build_type', ('wheel', 'sdist'))
|
| 266 |
+
def test_build_with_existing_file_present(self, build_type, tmpdir_cwd):
|
| 267 |
+
# Building a sdist/wheel should still succeed if there's
|
| 268 |
+
# already a sdist/wheel in the destination directory.
|
| 269 |
+
files = {
|
| 270 |
+
'setup.py': "from setuptools import setup\nsetup()",
|
| 271 |
+
'VERSION': "0.0.1",
|
| 272 |
+
'setup.cfg': DALS(
|
| 273 |
+
"""
|
| 274 |
+
[metadata]
|
| 275 |
+
name = foo
|
| 276 |
+
version = file: VERSION
|
| 277 |
+
"""
|
| 278 |
+
),
|
| 279 |
+
'pyproject.toml': DALS(
|
| 280 |
+
"""
|
| 281 |
+
[build-system]
|
| 282 |
+
requires = ["setuptools", "wheel"]
|
| 283 |
+
build-backend = "setuptools.build_meta"
|
| 284 |
+
"""
|
| 285 |
+
),
|
| 286 |
+
}
|
| 287 |
+
|
| 288 |
+
path.build(files)
|
| 289 |
+
|
| 290 |
+
dist_dir = os.path.abspath('preexisting-' + build_type)
|
| 291 |
+
|
| 292 |
+
build_backend = self.get_build_backend()
|
| 293 |
+
build_method = getattr(build_backend, 'build_' + build_type)
|
| 294 |
+
|
| 295 |
+
# Build a first sdist/wheel.
|
| 296 |
+
# Note: this also check the destination directory is
|
| 297 |
+
# successfully created if it does not exist already.
|
| 298 |
+
first_result = build_method(dist_dir)
|
| 299 |
+
|
| 300 |
+
# Change version.
|
| 301 |
+
with open("VERSION", "wt", encoding="utf-8") as version_file:
|
| 302 |
+
version_file.write("0.0.2")
|
| 303 |
+
|
| 304 |
+
# Build a *second* sdist/wheel.
|
| 305 |
+
second_result = build_method(dist_dir)
|
| 306 |
+
|
| 307 |
+
assert os.path.isfile(os.path.join(dist_dir, first_result))
|
| 308 |
+
assert first_result != second_result
|
| 309 |
+
|
| 310 |
+
# And if rebuilding the exact same sdist/wheel?
|
| 311 |
+
open(os.path.join(dist_dir, second_result), 'wb').close()
|
| 312 |
+
third_result = build_method(dist_dir)
|
| 313 |
+
assert third_result == second_result
|
| 314 |
+
assert os.path.getsize(os.path.join(dist_dir, third_result)) > 0
|
| 315 |
+
|
| 316 |
+
@pytest.mark.parametrize("setup_script", [None, SETUP_SCRIPT_STUB])
|
| 317 |
+
def test_build_with_pyproject_config(self, tmpdir, setup_script):
|
| 318 |
+
files = {
|
| 319 |
+
'pyproject.toml': DALS(
|
| 320 |
+
"""
|
| 321 |
+
[build-system]
|
| 322 |
+
requires = ["setuptools", "wheel"]
|
| 323 |
+
build-backend = "setuptools.build_meta"
|
| 324 |
+
|
| 325 |
+
[project]
|
| 326 |
+
name = "foo"
|
| 327 |
+
license = {text = "MIT"}
|
| 328 |
+
description = "This is a Python package"
|
| 329 |
+
dynamic = ["version", "readme"]
|
| 330 |
+
classifiers = [
|
| 331 |
+
"Development Status :: 5 - Production/Stable",
|
| 332 |
+
"Intended Audience :: Developers"
|
| 333 |
+
]
|
| 334 |
+
urls = {Homepage = "http://github.com"}
|
| 335 |
+
dependencies = [
|
| 336 |
+
"appdirs",
|
| 337 |
+
]
|
| 338 |
+
|
| 339 |
+
[project.optional-dependencies]
|
| 340 |
+
all = [
|
| 341 |
+
"tomli>=1",
|
| 342 |
+
"pyscaffold>=4,<5",
|
| 343 |
+
'importlib; python_version == "2.6"',
|
| 344 |
+
]
|
| 345 |
+
|
| 346 |
+
[project.scripts]
|
| 347 |
+
foo = "foo.cli:main"
|
| 348 |
+
|
| 349 |
+
[tool.setuptools]
|
| 350 |
+
zip-safe = false
|
| 351 |
+
package-dir = {"" = "src"}
|
| 352 |
+
packages = {find = {where = ["src"]}}
|
| 353 |
+
license-files = ["LICENSE*"]
|
| 354 |
+
|
| 355 |
+
[tool.setuptools.dynamic]
|
| 356 |
+
version = {attr = "foo.__version__"}
|
| 357 |
+
readme = {file = "README.rst"}
|
| 358 |
+
|
| 359 |
+
[tool.distutils.sdist]
|
| 360 |
+
formats = "gztar"
|
| 361 |
+
"""
|
| 362 |
+
),
|
| 363 |
+
"MANIFEST.in": DALS(
|
| 364 |
+
"""
|
| 365 |
+
global-include *.py *.txt
|
| 366 |
+
global-exclude *.py[cod]
|
| 367 |
+
"""
|
| 368 |
+
),
|
| 369 |
+
"README.rst": "This is a ``README``",
|
| 370 |
+
"LICENSE.txt": "---- placeholder MIT license ----",
|
| 371 |
+
"src": {
|
| 372 |
+
"foo": {
|
| 373 |
+
"__init__.py": "__version__ = '0.1'",
|
| 374 |
+
"__init__.pyi": "__version__: str",
|
| 375 |
+
"cli.py": "def main(): print('hello world')",
|
| 376 |
+
"data.txt": "def main(): print('hello world')",
|
| 377 |
+
"py.typed": "",
|
| 378 |
+
}
|
| 379 |
+
},
|
| 380 |
+
}
|
| 381 |
+
if setup_script:
|
| 382 |
+
files["setup.py"] = setup_script
|
| 383 |
+
|
| 384 |
+
build_backend = self.get_build_backend()
|
| 385 |
+
with tmpdir.as_cwd():
|
| 386 |
+
path.build(files)
|
| 387 |
+
sdist_path = build_backend.build_sdist("temp")
|
| 388 |
+
wheel_file = build_backend.build_wheel("temp")
|
| 389 |
+
|
| 390 |
+
with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
|
| 391 |
+
sdist_contents = set(tar.getnames())
|
| 392 |
+
|
| 393 |
+
with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
|
| 394 |
+
wheel_contents = set(zipfile.namelist())
|
| 395 |
+
metadata = str(zipfile.read("foo-0.1.dist-info/METADATA"), "utf-8")
|
| 396 |
+
license = str(zipfile.read("foo-0.1.dist-info/LICENSE.txt"), "utf-8")
|
| 397 |
+
epoints = str(zipfile.read("foo-0.1.dist-info/entry_points.txt"), "utf-8")
|
| 398 |
+
|
| 399 |
+
assert sdist_contents - {"foo-0.1/setup.py"} == {
|
| 400 |
+
'foo-0.1',
|
| 401 |
+
'foo-0.1/LICENSE.txt',
|
| 402 |
+
'foo-0.1/MANIFEST.in',
|
| 403 |
+
'foo-0.1/PKG-INFO',
|
| 404 |
+
'foo-0.1/README.rst',
|
| 405 |
+
'foo-0.1/pyproject.toml',
|
| 406 |
+
'foo-0.1/setup.cfg',
|
| 407 |
+
'foo-0.1/src',
|
| 408 |
+
'foo-0.1/src/foo',
|
| 409 |
+
'foo-0.1/src/foo/__init__.py',
|
| 410 |
+
'foo-0.1/src/foo/__init__.pyi',
|
| 411 |
+
'foo-0.1/src/foo/cli.py',
|
| 412 |
+
'foo-0.1/src/foo/data.txt',
|
| 413 |
+
'foo-0.1/src/foo/py.typed',
|
| 414 |
+
'foo-0.1/src/foo.egg-info',
|
| 415 |
+
'foo-0.1/src/foo.egg-info/PKG-INFO',
|
| 416 |
+
'foo-0.1/src/foo.egg-info/SOURCES.txt',
|
| 417 |
+
'foo-0.1/src/foo.egg-info/dependency_links.txt',
|
| 418 |
+
'foo-0.1/src/foo.egg-info/entry_points.txt',
|
| 419 |
+
'foo-0.1/src/foo.egg-info/requires.txt',
|
| 420 |
+
'foo-0.1/src/foo.egg-info/top_level.txt',
|
| 421 |
+
'foo-0.1/src/foo.egg-info/not-zip-safe',
|
| 422 |
+
}
|
| 423 |
+
assert wheel_contents == {
|
| 424 |
+
"foo/__init__.py",
|
| 425 |
+
"foo/__init__.pyi", # include type information by default
|
| 426 |
+
"foo/cli.py",
|
| 427 |
+
"foo/data.txt", # include_package_data defaults to True
|
| 428 |
+
"foo/py.typed", # include type information by default
|
| 429 |
+
"foo-0.1.dist-info/LICENSE.txt",
|
| 430 |
+
"foo-0.1.dist-info/METADATA",
|
| 431 |
+
"foo-0.1.dist-info/WHEEL",
|
| 432 |
+
"foo-0.1.dist-info/entry_points.txt",
|
| 433 |
+
"foo-0.1.dist-info/top_level.txt",
|
| 434 |
+
"foo-0.1.dist-info/RECORD",
|
| 435 |
+
}
|
| 436 |
+
assert license == "---- placeholder MIT license ----"
|
| 437 |
+
|
| 438 |
+
for line in (
|
| 439 |
+
"Summary: This is a Python package",
|
| 440 |
+
"License: MIT",
|
| 441 |
+
"Classifier: Intended Audience :: Developers",
|
| 442 |
+
"Requires-Dist: appdirs",
|
| 443 |
+
"Requires-Dist: " + str(Requirement('tomli>=1 ; extra == "all"')),
|
| 444 |
+
"Requires-Dist: "
|
| 445 |
+
+ str(Requirement('importlib; python_version=="2.6" and extra =="all"')),
|
| 446 |
+
):
|
| 447 |
+
assert line in metadata, (line, metadata)
|
| 448 |
+
|
| 449 |
+
assert metadata.strip().endswith("This is a ``README``")
|
| 450 |
+
assert epoints.strip() == "[console_scripts]\nfoo = foo.cli:main"
|
| 451 |
+
|
| 452 |
+
def test_static_metadata_in_pyproject_config(self, tmpdir):
|
| 453 |
+
# Make sure static metadata in pyproject.toml is not overwritten by setup.py
|
| 454 |
+
# as required by PEP 621
|
| 455 |
+
files = {
|
| 456 |
+
'pyproject.toml': DALS(
|
| 457 |
+
"""
|
| 458 |
+
[build-system]
|
| 459 |
+
requires = ["setuptools", "wheel"]
|
| 460 |
+
build-backend = "setuptools.build_meta"
|
| 461 |
+
|
| 462 |
+
[project]
|
| 463 |
+
name = "foo"
|
| 464 |
+
description = "This is a Python package"
|
| 465 |
+
version = "42"
|
| 466 |
+
dependencies = ["six"]
|
| 467 |
+
"""
|
| 468 |
+
),
|
| 469 |
+
'hello.py': DALS(
|
| 470 |
+
"""
|
| 471 |
+
def run():
|
| 472 |
+
print('hello')
|
| 473 |
+
"""
|
| 474 |
+
),
|
| 475 |
+
'setup.py': DALS(
|
| 476 |
+
"""
|
| 477 |
+
__import__('setuptools').setup(
|
| 478 |
+
name='bar',
|
| 479 |
+
version='13',
|
| 480 |
+
)
|
| 481 |
+
"""
|
| 482 |
+
),
|
| 483 |
+
}
|
| 484 |
+
build_backend = self.get_build_backend()
|
| 485 |
+
with tmpdir.as_cwd():
|
| 486 |
+
path.build(files)
|
| 487 |
+
sdist_path = build_backend.build_sdist("temp")
|
| 488 |
+
wheel_file = build_backend.build_wheel("temp")
|
| 489 |
+
|
| 490 |
+
assert (tmpdir / "temp/foo-42.tar.gz").exists()
|
| 491 |
+
assert (tmpdir / "temp/foo-42-py3-none-any.whl").exists()
|
| 492 |
+
assert not (tmpdir / "temp/bar-13.tar.gz").exists()
|
| 493 |
+
assert not (tmpdir / "temp/bar-42.tar.gz").exists()
|
| 494 |
+
assert not (tmpdir / "temp/foo-13.tar.gz").exists()
|
| 495 |
+
assert not (tmpdir / "temp/bar-13-py3-none-any.whl").exists()
|
| 496 |
+
assert not (tmpdir / "temp/bar-42-py3-none-any.whl").exists()
|
| 497 |
+
assert not (tmpdir / "temp/foo-13-py3-none-any.whl").exists()
|
| 498 |
+
|
| 499 |
+
with tarfile.open(os.path.join(tmpdir, "temp", sdist_path)) as tar:
|
| 500 |
+
pkg_info = str(tar.extractfile('foo-42/PKG-INFO').read(), "utf-8")
|
| 501 |
+
members = tar.getnames()
|
| 502 |
+
assert "bar-13/PKG-INFO" not in members
|
| 503 |
+
|
| 504 |
+
with ZipFile(os.path.join(tmpdir, "temp", wheel_file)) as zipfile:
|
| 505 |
+
metadata = str(zipfile.read("foo-42.dist-info/METADATA"), "utf-8")
|
| 506 |
+
members = zipfile.namelist()
|
| 507 |
+
assert "bar-13.dist-info/METADATA" not in members
|
| 508 |
+
|
| 509 |
+
for file in pkg_info, metadata:
|
| 510 |
+
for line in ("Name: foo", "Version: 42"):
|
| 511 |
+
assert line in file
|
| 512 |
+
for line in ("Name: bar", "Version: 13"):
|
| 513 |
+
assert line not in file
|
| 514 |
+
|
| 515 |
+
def test_build_sdist(self, build_backend):
|
| 516 |
+
dist_dir = os.path.abspath('pip-sdist')
|
| 517 |
+
os.makedirs(dist_dir)
|
| 518 |
+
sdist_name = build_backend.build_sdist(dist_dir)
|
| 519 |
+
|
| 520 |
+
assert os.path.isfile(os.path.join(dist_dir, sdist_name))
|
| 521 |
+
|
| 522 |
+
def test_prepare_metadata_for_build_wheel(self, build_backend):
|
| 523 |
+
dist_dir = os.path.abspath('pip-dist-info')
|
| 524 |
+
os.makedirs(dist_dir)
|
| 525 |
+
|
| 526 |
+
dist_info = build_backend.prepare_metadata_for_build_wheel(dist_dir)
|
| 527 |
+
|
| 528 |
+
assert os.path.isfile(os.path.join(dist_dir, dist_info, 'METADATA'))
|
| 529 |
+
|
| 530 |
+
def test_prepare_metadata_inplace(self, build_backend):
|
| 531 |
+
"""
|
| 532 |
+
Some users might pass metadata_directory pre-populated with `.tox` or `.venv`.
|
| 533 |
+
See issue #3523.
|
| 534 |
+
"""
|
| 535 |
+
for pre_existing in [
|
| 536 |
+
".tox/python/lib/python3.10/site-packages/attrs-22.1.0.dist-info",
|
| 537 |
+
".tox/python/lib/python3.10/site-packages/autocommand-2.2.1.dist-info",
|
| 538 |
+
".nox/python/lib/python3.10/site-packages/build-0.8.0.dist-info",
|
| 539 |
+
".venv/python3.10/site-packages/click-8.1.3.dist-info",
|
| 540 |
+
"venv/python3.10/site-packages/distlib-0.3.5.dist-info",
|
| 541 |
+
"env/python3.10/site-packages/docutils-0.19.dist-info",
|
| 542 |
+
]:
|
| 543 |
+
os.makedirs(pre_existing, exist_ok=True)
|
| 544 |
+
dist_info = build_backend.prepare_metadata_for_build_wheel(".")
|
| 545 |
+
assert os.path.isfile(os.path.join(dist_info, 'METADATA'))
|
| 546 |
+
|
| 547 |
+
def test_build_sdist_explicit_dist(self, build_backend):
|
| 548 |
+
# explicitly specifying the dist folder should work
|
| 549 |
+
# the folder sdist_directory and the ``--dist-dir`` can be the same
|
| 550 |
+
dist_dir = os.path.abspath('dist')
|
| 551 |
+
sdist_name = build_backend.build_sdist(dist_dir)
|
| 552 |
+
assert os.path.isfile(os.path.join(dist_dir, sdist_name))
|
| 553 |
+
|
| 554 |
+
def test_build_sdist_version_change(self, build_backend):
|
| 555 |
+
sdist_into_directory = os.path.abspath("out_sdist")
|
| 556 |
+
os.makedirs(sdist_into_directory)
|
| 557 |
+
|
| 558 |
+
sdist_name = build_backend.build_sdist(sdist_into_directory)
|
| 559 |
+
assert os.path.isfile(os.path.join(sdist_into_directory, sdist_name))
|
| 560 |
+
|
| 561 |
+
# if the setup.py changes subsequent call of the build meta
|
| 562 |
+
# should still succeed, given the
|
| 563 |
+
# sdist_directory the frontend specifies is empty
|
| 564 |
+
setup_loc = os.path.abspath("setup.py")
|
| 565 |
+
if not os.path.exists(setup_loc):
|
| 566 |
+
setup_loc = os.path.abspath("setup.cfg")
|
| 567 |
+
|
| 568 |
+
with open(setup_loc, 'rt', encoding="utf-8") as file_handler:
|
| 569 |
+
content = file_handler.read()
|
| 570 |
+
with open(setup_loc, 'wt', encoding="utf-8") as file_handler:
|
| 571 |
+
file_handler.write(content.replace("version='0.0.0'", "version='0.0.1'"))
|
| 572 |
+
|
| 573 |
+
shutil.rmtree(sdist_into_directory)
|
| 574 |
+
os.makedirs(sdist_into_directory)
|
| 575 |
+
|
| 576 |
+
sdist_name = build_backend.build_sdist("out_sdist")
|
| 577 |
+
assert os.path.isfile(os.path.join(os.path.abspath("out_sdist"), sdist_name))
|
| 578 |
+
|
| 579 |
+
def test_build_sdist_pyproject_toml_exists(self, tmpdir_cwd):
|
| 580 |
+
files = {
|
| 581 |
+
'setup.py': DALS(
|
| 582 |
+
"""
|
| 583 |
+
__import__('setuptools').setup(
|
| 584 |
+
name='foo',
|
| 585 |
+
version='0.0.0',
|
| 586 |
+
py_modules=['hello']
|
| 587 |
+
)"""
|
| 588 |
+
),
|
| 589 |
+
'hello.py': '',
|
| 590 |
+
'pyproject.toml': DALS(
|
| 591 |
+
"""
|
| 592 |
+
[build-system]
|
| 593 |
+
requires = ["setuptools", "wheel"]
|
| 594 |
+
build-backend = "setuptools.build_meta"
|
| 595 |
+
"""
|
| 596 |
+
),
|
| 597 |
+
}
|
| 598 |
+
path.build(files)
|
| 599 |
+
build_backend = self.get_build_backend()
|
| 600 |
+
targz_path = build_backend.build_sdist("temp")
|
| 601 |
+
with tarfile.open(os.path.join("temp", targz_path)) as tar:
|
| 602 |
+
assert any('pyproject.toml' in name for name in tar.getnames())
|
| 603 |
+
|
| 604 |
+
def test_build_sdist_setup_py_exists(self, tmpdir_cwd):
    """Even when build_sdist is driven by another script, setup.py is shipped."""
    path.build(defns[0])

    backend = self.get_build_backend()
    archive_name = backend.build_sdist("temp")
    with tarfile.open(os.path.join("temp", archive_name)) as archive:
        has_setup_py = any('setup.py' in entry for entry in archive.getnames())
    assert has_setup_py
|
| 613 |
+
|
| 614 |
+
def test_build_sdist_setup_py_manifest_excluded(self, tmpdir_cwd):
    """MANIFEST.in must be able to exclude setup.py from the sdist."""
    project = {
        'setup.py': DALS(
            """
            __import__('setuptools').setup(
                name='foo',
                version='0.0.0',
                py_modules=['hello']
            )"""
        ),
        'hello.py': '',
        'MANIFEST.in': DALS(
            """
            exclude setup.py
            """
        ),
    }
    path.build(project)

    backend = self.get_build_backend()
    archive_name = backend.build_sdist("temp")
    with tarfile.open(os.path.join("temp", archive_name)) as archive:
        # The exclusion must hold for every member of the archive.
        assert all('setup.py' not in entry for entry in archive.getnames())
|
| 639 |
+
|
| 640 |
+
def test_build_sdist_builds_targz_even_if_zip_indicated(self, tmpdir_cwd):
    """build_sdist must produce a ``.tar.gz`` even when ``setup.cfg``
    requests ``[sdist] formats=zip``.

    The original body only exercised ``build_sdist`` and discarded its
    return value, so a backend that honoured ``formats=zip`` would still
    pass; assert the produced filename to actually pin the behavior.
    """
    files = {
        'setup.py': DALS(
            """
            __import__('setuptools').setup(
                name='foo',
                version='0.0.0',
                py_modules=['hello']
            )"""
        ),
        'hello.py': '',
        'setup.cfg': DALS(
            """
            [sdist]
            formats=zip
            """
        ),
    }

    path.build(files)

    build_backend = self.get_build_backend()
    sdist_name = build_backend.build_sdist("temp")
    # PEP 517 mandates a gzipped tarball regardless of the legacy option.
    assert sdist_name.endswith(".tar.gz")
|
| 663 |
+
|
| 664 |
+
# Project skeleton whose setup.py imports the sibling `hello` module to obtain
# its version.  build_meta must reject such relative imports (see
# test_build_sdist_relative_path_import), while the legacy backend allows them.
_relative_path_import_files = {
    'setup.py': DALS(
        """
        __import__('setuptools').setup(
            name='foo',
            version=__import__('hello').__version__,
            py_modules=['hello']
        )"""
    ),
    'hello.py': '__version__ = "0.0.0"',
    'setup.cfg': DALS(
        """
        [sdist]
        formats=zip
        """
    ),
}
|
| 681 |
+
|
| 682 |
+
def test_build_sdist_relative_path_import(self, tmpdir_cwd):
    """build_meta must not let setup.py import modules from its own directory."""
    path.build(self._relative_path_import_files)
    backend = self.get_build_backend()
    with pytest.raises(ImportError, match="^No module named 'hello'$"):
        backend.build_sdist("temp")
|
| 687 |
+
|
| 688 |
+
# Minimal src-layout project declared purely via pyproject.toml; reused by the
# editable-install and in-place wheel tests below.
_simple_pyproject_example = {
    "pyproject.toml": DALS(
        """
        [project]
        name = "proj"
        version = "42"
        """
    ),
    "src": {"proj": {"__init__.py": ""}},
}
|
| 698 |
+
|
| 699 |
+
def _assert_link_tree(self, parent_dir):
|
| 700 |
+
"""All files in the directory should be either links or hard links"""
|
| 701 |
+
files = list(Path(parent_dir).glob("**/*"))
|
| 702 |
+
assert files # Should not be empty
|
| 703 |
+
for file in files:
|
| 704 |
+
assert file.is_symlink() or os.stat(file).st_nlink > 0
|
| 705 |
+
|
| 706 |
+
def test_editable_without_config_settings(self, tmpdir_cwd):
    """
    Sanity check that the plain editable build differs from --mode=strict.

    With --mode=strict a local ``build`` directory holding a package tree
    is created; without any mode, no such directory may appear.
    """
    path.build(self._simple_pyproject_example)
    backend = self.get_build_backend()
    build_dir = Path("build")
    assert not build_dir.exists()
    backend.build_editable("temp")
    assert not build_dir.exists()
|
| 719 |
+
|
| 720 |
+
def test_build_wheel_inplace(self, tmpdir_cwd):
    """A wheel can be built into a pre-existing ``build`` directory using
    the ``--build-option`` pass-through for ``build_ext --inplace``."""
    settings = {"--build-option": ["build_ext", "--inplace"]}
    path.build(self._simple_pyproject_example)
    backend = self.get_build_backend()
    build_dir = Path("build")
    assert not build_dir.exists()
    build_dir.mkdir()
    backend.prepare_metadata_for_build_wheel("build", settings)
    backend.build_wheel("build", settings)
    assert (build_dir / "proj-42-py3-none-any.whl").exists()
|
| 729 |
+
|
| 730 |
+
@pytest.mark.parametrize("config_settings", [{"editable-mode": "strict"}])
def test_editable_with_config_settings(self, tmpdir_cwd, config_settings):
    """--mode=strict editable builds must materialize a link tree under build/."""
    path.build({**self._simple_pyproject_example, '_meta': {}})
    assert not Path("build").exists()
    backend = self.get_build_backend()
    backend.prepare_metadata_for_build_editable("_meta", config_settings)
    backend.build_editable("temp", config_settings, "_meta")
    tree = next(Path("build").glob("__editable__.*"))
    self._assert_link_tree(tree)
|
| 738 |
+
|
| 739 |
+
# Each case pairs the literal text substituted into setup.py's
# ``setup_requires=...`` with the requirement list the backend must parse from
# it (newlines, comments, and line continuations must all be normalized away).
@pytest.mark.parametrize(
    ("setup_literal", "requirements"),
    [
        ("'foo'", ['foo']),
        ("['foo']", ['foo']),
        (r"'foo\n'", ['foo']),
        (r"'foo\n\n'", ['foo']),
        ("['foo', 'bar']", ['foo', 'bar']),
        (r"'# Has a comment line\nfoo'", ['foo']),
        (r"'foo # Has an inline comment'", ['foo']),
        (r"'foo \\\n >=3.0'", ['foo>=3.0']),
        (r"'foo\nbar'", ['foo', 'bar']),
        (r"'foo\nbar\n'", ['foo', 'bar']),
        (r"['foo\n', 'bar\n']", ['foo', 'bar']),
    ],
)
@pytest.mark.parametrize('use_wheel', [True, False])
def test_setup_requires(self, setup_literal, requirements, use_wheel, tmpdir_cwd):
    """setup_requires declared in setup.py is reported by the PEP 517
    get_requires_for_build_{wheel,sdist} hooks, for both hook variants."""
    files = {
        'setup.py': DALS(
            """
            from setuptools import setup

            setup(
                name="qux",
                version="0.0.0",
                py_modules=["hello"],
                setup_requires={setup_literal},
            )
            """
        ).format(setup_literal=setup_literal),
        'hello.py': DALS(
            """
            def run():
                print('hello')
            """
        ),
    }

    path.build(files)

    build_backend = self.get_build_backend()

    # Both hooks must surface the same setup_requires information.
    if use_wheel:
        get_requires = build_backend.get_requires_for_build_wheel
    else:
        get_requires = build_backend.get_requires_for_build_sdist

    # Ensure that the build requirements are properly parsed
    expected = sorted(requirements)
    actual = get_requires()

    assert expected == sorted(actual)
|
| 792 |
+
|
| 793 |
+
def test_setup_requires_with_auto_discovery(self, tmpdir_cwd):
    """Retrieving setup_requires must not accidentally trigger auto-discovery.

    The MinimalDistribution used for the lookup receives an incomplete set
    of attributes, so auto-discovery kicking in would break it.
    """
    project_files = {
        'pyproject.toml': DALS(
            """
            [project]
            name = "proj"
            version = "42"
            """
        ),
        "setup.py": DALS(
            """
            __import__('setuptools').setup(
                setup_requires=["foo"],
                py_modules = ["hello", "world"]
            )
            """
        ),
        'hello.py': "'hello'",
        'world.py': "'world'",
    }
    path.build(project_files)
    backend = self.get_build_backend()
    reported = backend.get_requires_for_build_wheel()
    assert reported == ["foo"]
|
| 820 |
+
|
| 821 |
+
def test_dont_install_setup_requires(self, tmpdir_cwd):
    """Metadata preparation must not attempt to install setup_requires.

    The declared requirement can never be satisfied, so any install
    attempt would make this test error out.
    """
    project_files = {
        'setup.py': DALS(
            """
            from setuptools import setup

            setup(
                name="qux",
                version="0.0.0",
                py_modules=["hello"],
                setup_requires=["does-not-exist >99"],
            )
            """
        ),
        'hello.py': DALS(
            """
            def run():
                print('hello')
            """
        ),
    }

    path.build(project_files)

    backend = self.get_build_backend()

    dist_info_dir = os.path.abspath('pip-dist-info')
    os.makedirs(dist_info_dir)

    # does-not-exist can't be satisfied, so if it attempts to install
    # setup_requires, it will fail.
    backend.prepare_metadata_for_build_wheel(dist_info_dir)
|
| 853 |
+
|
| 854 |
+
# Project whose setup.py asserts that sys.argv[0] points at setup.py itself.
# build_meta rewrites argv[0] (so this project fails there); the legacy
# backend preserves it (so the same project succeeds).
_sys_argv_0_passthrough = {
    'setup.py': DALS(
        """
        import os
        import sys

        __import__('setuptools').setup(
            name='foo',
            version='0.0.0',
        )

        sys_argv = os.path.abspath(sys.argv[0])
        file_path = os.path.abspath('setup.py')
        assert sys_argv == file_path
        """
    )
}
|
| 871 |
+
|
| 872 |
+
def test_sys_argv_passthrough(self, tmpdir_cwd):
    """build_meta rewrites sys.argv[0], so the fixture's assert must fire."""
    path.build(self._sys_argv_0_passthrough)
    backend = self.get_build_backend()
    with pytest.raises(AssertionError):
        backend.build_sdist("temp")
|
| 877 |
+
|
| 878 |
+
# Project whose setup.py checks that __file__ is absolute when executed by
# the backend (and also pulls in a setup_requires already satisfied in CI).
_setup_py_file_abspath = {
    'setup.py': DALS(
        """
        import os
        assert os.path.isabs(__file__)
        __import__('setuptools').setup(
            name='foo',
            version='0.0.0',
            py_modules=['hello'],
            setup_requires=['six'],
        )
        """
    )
}
|
| 892 |
+
|
| 893 |
+
def test_setup_py_file_abspath(self, tmpdir_cwd):
    """The backend must execute setup.py with an absolute ``__file__``."""
    path.build(self._setup_py_file_abspath)
    backend = self.get_build_backend()
    # The fixture's own assert would abort the build if __file__ were relative.
    backend.build_sdist("temp")
|
| 897 |
+
|
| 898 |
+
@pytest.mark.parametrize('build_hook', ('build_sdist', 'build_wheel'))
def test_build_with_empty_setuppy(self, build_backend, build_hook):
    """An empty setup.py yields no distribution; both hooks must say so."""
    path.build({'setup.py': ''})

    expected_msg = re.escape('No distribution was found.')
    hook = getattr(build_backend, build_hook)
    with pytest.raises(ValueError, match=expected_msg):
        hook("temp")
|
| 906 |
+
|
| 907 |
+
|
| 908 |
+
class TestBuildMetaLegacyBackend(TestBuildMetaBackend):
    """Re-run the backend suite against the ``__legacy__`` fallback backend,
    overriding the two tests whose expected behavior differs."""

    backend_name = 'setuptools.build_meta:__legacy__'

    def test_build_sdist_relative_path_import(self, tmpdir_cwd):
        """Must fail in build_meta, but must pass in build_meta_legacy."""
        path.build(self._relative_path_import_files)
        backend = self.get_build_backend()
        backend.build_sdist("temp")

    def test_sys_argv_passthrough(self, tmpdir_cwd):
        """The legacy backend keeps sys.argv[0] pointing at setup.py."""
        path.build(self._sys_argv_0_passthrough)
        backend = self.get_build_backend()
        backend.build_sdist("temp")
|
| 924 |
+
|
| 925 |
+
|
| 926 |
+
def test_legacy_editable_install(venv, tmpdir, tmpdir_cwd):
    """``SETUPTOOLS_ENABLE_FEATURES=legacy-editable`` must route editable
    installs through ``setup.py develop`` instead of building a wheel."""
    pyproject = """
    [build-system]
    requires = ["setuptools"]
    build-backend = "setuptools.build_meta"
    [project]
    name = "myproj"
    version = "42"
    """
    path.build({"pyproject.toml": DALS(pyproject), "mymod.py": ""})

    pip_cmd = ["pip", "install", "--no-build-isolation", "-e", "."]

    # Sanity check: by default the modern (wheel-based) code path is taken.
    log = venv.run(pip_cmd, cwd=tmpdir).lower()
    assert "running setup.py develop for myproj" not in log
    assert "created wheel for myproj" in log

    # Real test: the env var opts back into the develop-based path.
    legacy_env = {**os.environ, "SETUPTOOLS_ENABLE_FEATURES": "legacy-editable"}
    log = venv.run(pip_cmd, cwd=tmpdir, env=legacy_env).lower()
    assert "running setup.py develop for myproj" in log
|
| 948 |
+
|
| 949 |
+
|
| 950 |
+
@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
def test_sys_exit_0_in_setuppy(monkeypatch, tmp_path):
    """Setuptools should be resilient to setup.py with ``sys.exit(0)`` (#3973)."""
    monkeypatch.chdir(tmp_path)
    script = """
    import sys, setuptools
    setuptools.setup(name='foo', version='0.0.0')
    sys.exit(0)
    """
    (tmp_path / "setup.py").write_text(DALS(script), encoding="utf-8")
    backend = BuildBackend(backend_name="setuptools.build_meta")
    # A zero exit status in setup.py must not abort requirement collection.
    assert backend.get_requires_for_build_wheel() == []
|
| 962 |
+
|
| 963 |
+
|
| 964 |
+
def test_system_exit_in_setuppy(monkeypatch, tmp_path):
    """A non-zero ``sys.exit`` in setup.py must propagate to the caller."""
    monkeypatch.chdir(tmp_path)
    (tmp_path / "setup.py").write_text(
        "import sys; sys.exit('some error')", encoding="utf-8"
    )
    with pytest.raises(SystemExit, match="some error"):
        backend = BuildBackend(backend_name="setuptools.build_meta")
        backend.get_requires_for_build_wheel()
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_build_py.py
ADDED
|
@@ -0,0 +1,480 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import shutil
|
| 3 |
+
import stat
|
| 4 |
+
import warnings
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from unittest.mock import Mock
|
| 7 |
+
|
| 8 |
+
import jaraco.path
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
from setuptools import SetuptoolsDeprecationWarning
|
| 12 |
+
from setuptools.dist import Distribution
|
| 13 |
+
|
| 14 |
+
from .textwrap import DALS
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def test_directories_in_package_data_glob(tmpdir_cwd):
    """
    Directories matching the glob in package_data should
    not be included in the package data.

    Regression test for #261.
    """
    distribution = Distribution(
        {
            'script_name': 'setup.py',
            'script_args': ['build_py'],
            'packages': [''],
            'package_data': {'': ['path/*']},
        }
    )
    # Only a directory matches the glob; build_py must skip it, not crash.
    os.makedirs('path/subpath')
    distribution.parse_command_line()
    distribution.run_commands()
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def test_recursive_in_package_data_glob(tmpdir_cwd):
    """
    Files matching recursive globs (**) in package_data should
    be included in the package data.

    #1806
    """
    distribution = Distribution(
        {
            'script_name': 'setup.py',
            'script_args': ['build_py'],
            'packages': [''],
            'package_data': {'': ['path/**/data']},
        }
    )
    os.makedirs('path/subpath/subsubpath')
    open('path/subpath/subsubpath/data', 'wb').close()

    distribution.parse_command_line()
    distribution.run_commands()

    copied_mode = os.stat('build/lib/path/subpath/subsubpath/data').st_mode
    assert stat.S_ISREG(copied_mode), "File is not included"
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def test_read_only(tmpdir_cwd):
    """
    Ensure read-only flag is not preserved in copy
    for package modules and package data, as that
    causes problems with deleting read-only files on
    Windows.

    #1451
    """
    distribution = Distribution(
        {
            'script_name': 'setup.py',
            'script_args': ['build_py'],
            'packages': ['pkg'],
            'package_data': {'pkg': ['data.dat']},
        }
    )
    os.makedirs('pkg')
    for source in ('pkg/__init__.py', 'pkg/data.dat'):
        open(source, 'wb').close()
        os.chmod(source, stat.S_IREAD)
    distribution.parse_command_line()
    distribution.run_commands()
    # Would fail on Windows if the read-only bit had been carried over.
    shutil.rmtree('build')
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
@pytest.mark.xfail(
    'platform.system() == "Windows"',
    reason="On Windows, files do not have executable bits",
    raises=AssertionError,
    strict=True,
)
def test_executable_data(tmpdir_cwd):
    """
    Ensure the executable bit is preserved when copying
    package data, as users rely on it for scripts.

    #2041
    """
    distribution = Distribution(
        {
            'script_name': 'setup.py',
            'script_args': ['build_py'],
            'packages': ['pkg'],
            'package_data': {'pkg': ['run-me']},
        }
    )
    os.makedirs('pkg')
    open('pkg/__init__.py', 'wb').close()
    open('pkg/run-me', 'wb').close()
    os.chmod('pkg/run-me', 0o700)

    distribution.parse_command_line()
    distribution.run_commands()

    copied_mode = os.stat('build/lib/pkg/run-me').st_mode
    assert copied_mode & stat.S_IEXEC, "Script is not executable"
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
# Project skeleton shared by several tests below: a package whose `mypkg.tests`
# subpackage is excluded via [options.packages.find], while MANIFEST.in
# nonetheless includes every *.py/*.txt file (the tension exercised by #3260).
EXAMPLE_WITH_MANIFEST = {
    "setup.cfg": DALS(
        """
        [metadata]
        name = mypkg
        version = 42

        [options]
        include_package_data = True
        packages = find:

        [options.packages.find]
        exclude = *.tests*
        """
    ),
    "mypkg": {
        "__init__.py": "",
        "resource_file.txt": "",
        "tests": {
            "__init__.py": "",
            "test_mypkg.py": "",
            "test_file.txt": "",
        },
    },
    "MANIFEST.in": DALS(
        """
        global-include *.py *.txt
        global-exclude *.py[cod]
        prune dist
        prune build
        prune *.egg-info
        """
    ),
}
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def test_excluded_subpackages(tmpdir_cwd):
    """Excluded subpackages currently still leak into the build (#3260);
    until that is fixed, build_py must at least warn about the conflict."""
    jaraco.path.build(EXAMPLE_WITH_MANIFEST)
    dist = Distribution({"script_name": "%PEP 517%"})
    dist.parse_config_files()

    build_py = dist.get_command_obj("build_py")

    msg = r"Python recognizes 'mypkg\.tests' as an importable package"
    with pytest.warns(SetuptoolsDeprecationWarning, match=msg):
        # TODO: To fix #3260 we need some transition period to deprecate the
        # existing behavior of `include_package_data`. After the transition, we
        # should remove the warning and fix the behaviour.

        if os.getenv("SETUPTOOLS_USE_DISTUTILS") == "stdlib":
            # pytest.warns reset the warning filter temporarily
            # https://github.com/pytest-dev/pytest/issues/4011#issuecomment-423494810
            warnings.filterwarnings(
                "ignore",
                "'encoding' argument not specified",
                module="distutils.text_file",
                # This warning is already fixed in pypa/distutils but not in stdlib
            )

        build_py.finalize_options()
        build_py.run()

    build_dir = Path(dist.get_command_obj("build_py").build_lib)
    assert (build_dir / "mypkg/__init__.py").exists()
    assert (build_dir / "mypkg/resource_file.txt").exists()

    # Setuptools is configured to ignore `mypkg.tests`, therefore the following
    # files/dirs should not be included in the distribution.
    for f in [
        "mypkg/tests/__init__.py",
        "mypkg/tests/test_mypkg.py",
        "mypkg/tests/test_file.txt",
        "mypkg/tests",
    ]:
        with pytest.raises(AssertionError):
            # TODO: Enforce the following assertion once #3260 is fixed
            # (remove context manager and the following xfail).
            assert not (build_dir / f).exists()

        # First leaked path already demonstrates the bug; mark and stop.
        pytest.xfail("#3260")
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
@pytest.mark.filterwarnings("ignore::setuptools.SetuptoolsDeprecationWarning")
def test_existing_egg_info(tmpdir_cwd, monkeypatch):
    """When provided with the ``existing_egg_info_dir`` attribute, build_py should not
    attempt to run egg_info again.
    """
    # == Pre-condition ==
    # Generate an egg-info dir
    jaraco.path.build(EXAMPLE_WITH_MANIFEST)
    dist = Distribution({"script_name": "%PEP 517%"})
    dist.parse_config_files()
    assert dist.include_package_data

    egg_info = dist.get_command_obj("egg_info")
    dist.run_command("egg_info")
    egg_info_dir = next(Path(egg_info.egg_base).glob("*.egg-info"))
    assert egg_info_dir.is_dir()

    # == Setup ==
    build_py = dist.get_command_obj("build_py")
    build_py.finalize_options()
    egg_info = dist.get_command_obj("egg_info")
    # Spy on egg_info.run while still delegating to the real implementation.
    egg_info_run = Mock(side_effect=egg_info.run)
    monkeypatch.setattr(egg_info, "run", egg_info_run)

    # == Remove caches ==
    # egg_info is called when build_py looks for data_files, which gets cached.
    # We need to ensure it is not cached yet, otherwise it may impact on the tests
    build_py.__dict__.pop('data_files', None)
    dist.reinitialize_command(egg_info)

    # == Sanity check ==
    # Ensure that if existing_egg_info is not given, build_py attempts to run egg_info
    build_py.existing_egg_info_dir = None
    build_py.run()
    egg_info_run.assert_called()

    # == Remove caches ==
    egg_info_run.reset_mock()
    build_py.__dict__.pop('data_files', None)
    dist.reinitialize_command(egg_info)

    # == Actual test ==
    # Ensure that if existing_egg_info_dir is given, egg_info doesn't run
    build_py.existing_egg_info_dir = egg_info_dir
    build_py.run()
    egg_info_run.assert_not_called()
    assert build_py.data_files

    # Make sure the list of outputs is actually OK
    # (normalize separators so the check is platform-independent)
    outputs = map(lambda x: x.replace(os.sep, "/"), build_py.get_outputs())
    assert outputs
    example = str(Path(build_py.build_lib, "mypkg/__init__.py")).replace(os.sep, "/")
    assert example in outputs
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
# Project using arbitrary [tool.setuptools.package-dir] mappings: a src-layout
# root, a renamed package dir (_sub2 -> mypkg.sub2) and an out-of-tree package
# ("other" -> mypkg.sub2.nested).  Used by test_get_outputs below.
EXAMPLE_ARBITRARY_MAPPING = {
    "pyproject.toml": DALS(
        """
        [project]
        name = "mypkg"
        version = "42"

        [tool.setuptools]
        packages = ["mypkg", "mypkg.sub1", "mypkg.sub2", "mypkg.sub2.nested"]

        [tool.setuptools.package-dir]
        "" = "src"
        "mypkg.sub2" = "src/mypkg/_sub2"
        "mypkg.sub2.nested" = "other"
        """
    ),
    "src": {
        "mypkg": {
            "__init__.py": "",
            "resource_file.txt": "",
            "sub1": {
                "__init__.py": "",
                "mod1.py": "",
            },
            "_sub2": {
                "mod2.py": "",
            },
        },
    },
    "other": {
        "__init__.py": "",
        "mod3.py": "",
    },
    "MANIFEST.in": DALS(
        """
        global-include *.py *.txt
        global-exclude *.py[cod]
        """
    ),
}
|
| 301 |
+
|
| 302 |
+
|
| 303 |
+
def test_get_outputs(tmpdir_cwd):
    """get_outputs()/get_output_mapping() must honour arbitrary
    ``package-dir`` mappings (src layout, renamed dirs, out-of-tree pkgs)."""
    jaraco.path.build(EXAMPLE_ARBITRARY_MAPPING)
    dist = Distribution({"script_name": "%test%"})
    dist.parse_config_files()

    build_py = dist.get_command_obj("build_py")
    build_py.editable_mode = True  # the mapping is what editable installs use
    build_py.ensure_finalized()
    # Normalize separators so the expectations below work on Windows too.
    build_lib = build_py.build_lib.replace(os.sep, "/")
    outputs = {x.replace(os.sep, "/") for x in build_py.get_outputs()}
    assert outputs == {
        f"{build_lib}/mypkg/__init__.py",
        f"{build_lib}/mypkg/resource_file.txt",
        f"{build_lib}/mypkg/sub1/__init__.py",
        f"{build_lib}/mypkg/sub1/mod1.py",
        f"{build_lib}/mypkg/sub2/mod2.py",
        f"{build_lib}/mypkg/sub2/nested/__init__.py",
        f"{build_lib}/mypkg/sub2/nested/mod3.py",
    }
    mapping = {
        k.replace(os.sep, "/"): v.replace(os.sep, "/")
        for k, v in build_py.get_output_mapping().items()
    }
    assert mapping == {
        f"{build_lib}/mypkg/__init__.py": "src/mypkg/__init__.py",
        f"{build_lib}/mypkg/resource_file.txt": "src/mypkg/resource_file.txt",
        f"{build_lib}/mypkg/sub1/__init__.py": "src/mypkg/sub1/__init__.py",
        f"{build_lib}/mypkg/sub1/mod1.py": "src/mypkg/sub1/mod1.py",
        f"{build_lib}/mypkg/sub2/mod2.py": "src/mypkg/_sub2/mod2.py",
        f"{build_lib}/mypkg/sub2/nested/__init__.py": "other/__init__.py",
        f"{build_lib}/mypkg/sub2/nested/mod3.py": "other/mod3.py",
    }
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
class TestTypeInfoFiles:
    """Type-information files (``py.typed``, ``*.pyi``) should ship by
    default and remain excludable via ``exclude-package-data``."""

    # pyproject.toml variants exercised against each directory layout below.
    PYPROJECTS = {
        "default_pyproject": DALS(
            """
            [project]
            name = "foo"
            version = "1"
            """
        ),
        "dont_include_package_data": DALS(
            """
            [project]
            name = "foo"
            version = "1"

            [tool.setuptools]
            include-package-data = false
            """
        ),
        "exclude_type_info": DALS(
            """
            [project]
            name = "foo"
            version = "1"

            [tool.setuptools]
            include-package-data = false

            [tool.setuptools.exclude-package-data]
            "*" = ["py.typed", "*.pyi"]
            """
        ),
    }

    # Each example maps a directory layout to the type files expected in it.
    EXAMPLES = {
        "simple_namespace": {
            "directory_structure": {
                "foo": {
                    "bar.pyi": "",
                    "py.typed": "",
                    "__init__.py": "",
                }
            },
            "expected_type_files": {"foo/bar.pyi", "foo/py.typed"},
        },
        "nested_inside_namespace": {
            "directory_structure": {
                "foo": {
                    "bar": {
                        "py.typed": "",
                        "mod.pyi": "",
                    }
                }
            },
            "expected_type_files": {"foo/bar/mod.pyi", "foo/bar/py.typed"},
        },
        "namespace_nested_inside_regular": {
            "directory_structure": {
                "foo": {
                    "namespace": {
                        "foo.pyi": "",
                    },
                    "__init__.pyi": "",
                    "py.typed": "",
                }
            },
            "expected_type_files": {
                "foo/namespace/foo.pyi",
                "foo/__init__.pyi",
                "foo/py.typed",
            },
        },
    }

    @pytest.mark.parametrize(
        "pyproject",
        [
            "default_pyproject",
            pytest.param(
                "dont_include_package_data",
                marks=pytest.mark.xfail(reason="pypa/setuptools#4350"),
            ),
        ],
    )
    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_type_files_included_by_default(self, tmpdir_cwd, pyproject, example):
        """Type files must appear in the build outputs without opt-in."""
        structure = {
            **self.EXAMPLES[example]["directory_structure"],
            "pyproject.toml": self.PYPROJECTS[pyproject],
        }
        expected_type_files = self.EXAMPLES[example]["expected_type_files"]
        jaraco.path.build(structure)

        build_py = get_finalized_build_py()
        outputs = get_outputs(build_py)
        assert expected_type_files <= outputs

    @pytest.mark.parametrize("pyproject", ["exclude_type_info"])
    @pytest.mark.parametrize("example", EXAMPLES.keys())
    def test_type_files_can_be_excluded(self, tmpdir_cwd, pyproject, example):
        """exclude-package-data must be able to opt out of type files."""
        structure = {
            **self.EXAMPLES[example]["directory_structure"],
            "pyproject.toml": self.PYPROJECTS[pyproject],
        }
        expected_type_files = self.EXAMPLES[example]["expected_type_files"]
        jaraco.path.build(structure)

        build_py = get_finalized_build_py()
        outputs = get_outputs(build_py)
        assert expected_type_files.isdisjoint(outputs)

    def test_stub_only_package(self, tmpdir_cwd):
        """A ``*-stubs`` package (PEP 561) ships its .pyi files by default."""
        structure = {
            "pyproject.toml": DALS(
                """
                [project]
                name = "foo-stubs"
                version = "1"
                """
            ),
            "foo-stubs": {"__init__.pyi": "", "bar.pyi": ""},
        }
        expected_type_files = {"foo-stubs/__init__.pyi", "foo-stubs/bar.pyi"}
        jaraco.path.build(structure)

        build_py = get_finalized_build_py()
        outputs = get_outputs(build_py)
        assert expected_type_files <= outputs
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
def get_finalized_build_py(script_name="%build_py-test%"):
|
| 468 |
+
dist = Distribution({"script_name": script_name})
|
| 469 |
+
dist.parse_config_files()
|
| 470 |
+
build_py = dist.get_command_obj("build_py")
|
| 471 |
+
build_py.finalize_options()
|
| 472 |
+
return build_py
|
| 473 |
+
|
| 474 |
+
|
| 475 |
+
def get_outputs(build_py):
|
| 476 |
+
build_dir = Path(build_py.build_lib)
|
| 477 |
+
return {
|
| 478 |
+
os.path.relpath(x, build_dir).replace(os.sep, "/")
|
| 479 |
+
for x in build_py.get_outputs()
|
| 480 |
+
}
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_develop.py
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""develop tests"""
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import pathlib
|
| 5 |
+
import platform
|
| 6 |
+
import subprocess
|
| 7 |
+
import sys
|
| 8 |
+
|
| 9 |
+
import pytest
|
| 10 |
+
|
| 11 |
+
from setuptools._path import paths_on_pythonpath
|
| 12 |
+
from setuptools.command.develop import develop
|
| 13 |
+
from setuptools.dist import Distribution
|
| 14 |
+
|
| 15 |
+
from . import contexts, namespaces
|
| 16 |
+
|
| 17 |
+
SETUP_PY = """\
|
| 18 |
+
from setuptools import setup
|
| 19 |
+
|
| 20 |
+
setup(name='foo',
|
| 21 |
+
packages=['foo'],
|
| 22 |
+
)
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
INIT_PY = """print "foo"
|
| 26 |
+
"""
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
@pytest.fixture
|
| 30 |
+
def temp_user(monkeypatch):
|
| 31 |
+
with contexts.tempdir() as user_base:
|
| 32 |
+
with contexts.tempdir() as user_site:
|
| 33 |
+
monkeypatch.setattr('site.USER_BASE', user_base)
|
| 34 |
+
monkeypatch.setattr('site.USER_SITE', user_site)
|
| 35 |
+
yield
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
@pytest.fixture
|
| 39 |
+
def test_env(tmpdir, temp_user):
|
| 40 |
+
target = tmpdir
|
| 41 |
+
foo = target.mkdir('foo')
|
| 42 |
+
setup = target / 'setup.py'
|
| 43 |
+
if setup.isfile():
|
| 44 |
+
raise ValueError(dir(target))
|
| 45 |
+
with setup.open('w') as f:
|
| 46 |
+
f.write(SETUP_PY)
|
| 47 |
+
init = foo / '__init__.py'
|
| 48 |
+
with init.open('w') as f:
|
| 49 |
+
f.write(INIT_PY)
|
| 50 |
+
with target.as_cwd():
|
| 51 |
+
yield target
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class TestDevelop:
|
| 55 |
+
in_virtualenv = hasattr(sys, 'real_prefix')
|
| 56 |
+
in_venv = hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix
|
| 57 |
+
|
| 58 |
+
def test_console_scripts(self, tmpdir):
|
| 59 |
+
"""
|
| 60 |
+
Test that console scripts are installed and that they reference
|
| 61 |
+
only the project by name and not the current version.
|
| 62 |
+
"""
|
| 63 |
+
pytest.skip(
|
| 64 |
+
"TODO: needs a fixture to cause 'develop' "
|
| 65 |
+
"to be invoked without mutating environment."
|
| 66 |
+
)
|
| 67 |
+
settings = dict(
|
| 68 |
+
name='foo',
|
| 69 |
+
packages=['foo'],
|
| 70 |
+
version='0.0',
|
| 71 |
+
entry_points={
|
| 72 |
+
'console_scripts': [
|
| 73 |
+
'foocmd = foo:foo',
|
| 74 |
+
],
|
| 75 |
+
},
|
| 76 |
+
)
|
| 77 |
+
dist = Distribution(settings)
|
| 78 |
+
dist.script_name = 'setup.py'
|
| 79 |
+
cmd = develop(dist)
|
| 80 |
+
cmd.ensure_finalized()
|
| 81 |
+
cmd.install_dir = tmpdir
|
| 82 |
+
cmd.run()
|
| 83 |
+
# assert '0.0' not in foocmd_text
|
| 84 |
+
|
| 85 |
+
@pytest.mark.xfail(reason="legacy behavior retained for compatibility #4167")
|
| 86 |
+
def test_egg_link_filename(self):
|
| 87 |
+
settings = dict(
|
| 88 |
+
name='Foo $$$ Bar_baz-bing',
|
| 89 |
+
)
|
| 90 |
+
dist = Distribution(settings)
|
| 91 |
+
cmd = develop(dist)
|
| 92 |
+
cmd.ensure_finalized()
|
| 93 |
+
link = pathlib.Path(cmd.egg_link)
|
| 94 |
+
assert link.suffix == '.egg-link'
|
| 95 |
+
assert link.stem == 'Foo_Bar_baz_bing'
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class TestResolver:
|
| 99 |
+
"""
|
| 100 |
+
TODO: These tests were written with a minimal understanding
|
| 101 |
+
of what _resolve_setup_path is intending to do. Come up with
|
| 102 |
+
more meaningful cases that look like real-world scenarios.
|
| 103 |
+
"""
|
| 104 |
+
|
| 105 |
+
def test_resolve_setup_path_cwd(self):
|
| 106 |
+
assert develop._resolve_setup_path('.', '.', '.') == '.'
|
| 107 |
+
|
| 108 |
+
def test_resolve_setup_path_one_dir(self):
|
| 109 |
+
assert develop._resolve_setup_path('pkgs', '.', 'pkgs') == '../'
|
| 110 |
+
|
| 111 |
+
def test_resolve_setup_path_one_dir_trailing_slash(self):
|
| 112 |
+
assert develop._resolve_setup_path('pkgs/', '.', 'pkgs') == '../'
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class TestNamespaces:
|
| 116 |
+
@staticmethod
|
| 117 |
+
def install_develop(src_dir, target):
|
| 118 |
+
develop_cmd = [
|
| 119 |
+
sys.executable,
|
| 120 |
+
'setup.py',
|
| 121 |
+
'develop',
|
| 122 |
+
'--install-dir',
|
| 123 |
+
str(target),
|
| 124 |
+
]
|
| 125 |
+
with src_dir.as_cwd():
|
| 126 |
+
with paths_on_pythonpath([str(target)]):
|
| 127 |
+
subprocess.check_call(develop_cmd)
|
| 128 |
+
|
| 129 |
+
@pytest.mark.skipif(
|
| 130 |
+
bool(os.environ.get("APPVEYOR")),
|
| 131 |
+
reason="https://github.com/pypa/setuptools/issues/851",
|
| 132 |
+
)
|
| 133 |
+
@pytest.mark.skipif(
|
| 134 |
+
platform.python_implementation() == 'PyPy',
|
| 135 |
+
reason="https://github.com/pypa/setuptools/issues/1202",
|
| 136 |
+
)
|
| 137 |
+
def test_namespace_package_importable(self, tmpdir):
|
| 138 |
+
"""
|
| 139 |
+
Installing two packages sharing the same namespace, one installed
|
| 140 |
+
naturally using pip or `--single-version-externally-managed`
|
| 141 |
+
and the other installed using `develop` should leave the namespace
|
| 142 |
+
in tact and both packages reachable by import.
|
| 143 |
+
"""
|
| 144 |
+
pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
|
| 145 |
+
pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
|
| 146 |
+
target = tmpdir / 'packages'
|
| 147 |
+
# use pip to install to the target directory
|
| 148 |
+
install_cmd = [
|
| 149 |
+
sys.executable,
|
| 150 |
+
'-m',
|
| 151 |
+
'pip',
|
| 152 |
+
'install',
|
| 153 |
+
str(pkg_A),
|
| 154 |
+
'-t',
|
| 155 |
+
str(target),
|
| 156 |
+
]
|
| 157 |
+
subprocess.check_call(install_cmd)
|
| 158 |
+
self.install_develop(pkg_B, target)
|
| 159 |
+
namespaces.make_site_dir(target)
|
| 160 |
+
try_import = [
|
| 161 |
+
sys.executable,
|
| 162 |
+
'-c',
|
| 163 |
+
'import myns.pkgA; import myns.pkgB',
|
| 164 |
+
]
|
| 165 |
+
with paths_on_pythonpath([str(target)]):
|
| 166 |
+
subprocess.check_call(try_import)
|
| 167 |
+
|
| 168 |
+
# additionally ensure that pkg_resources import works
|
| 169 |
+
pkg_resources_imp = [
|
| 170 |
+
sys.executable,
|
| 171 |
+
'-c',
|
| 172 |
+
'import pkg_resources',
|
| 173 |
+
]
|
| 174 |
+
with paths_on_pythonpath([str(target)]):
|
| 175 |
+
subprocess.check_call(pkg_resources_imp)
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_editable_install.py
ADDED
|
@@ -0,0 +1,1289 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import platform
|
| 5 |
+
import stat
|
| 6 |
+
import subprocess
|
| 7 |
+
import sys
|
| 8 |
+
from copy import deepcopy
|
| 9 |
+
from importlib import import_module
|
| 10 |
+
from importlib.machinery import EXTENSION_SUFFIXES
|
| 11 |
+
from pathlib import Path
|
| 12 |
+
from textwrap import dedent
|
| 13 |
+
from typing import Any
|
| 14 |
+
from unittest.mock import Mock
|
| 15 |
+
from uuid import uuid4
|
| 16 |
+
|
| 17 |
+
import jaraco.envs
|
| 18 |
+
import jaraco.path
|
| 19 |
+
import pytest
|
| 20 |
+
from path import Path as _Path
|
| 21 |
+
|
| 22 |
+
from setuptools._importlib import resources as importlib_resources
|
| 23 |
+
from setuptools.command.editable_wheel import (
|
| 24 |
+
_DebuggingTips,
|
| 25 |
+
_encode_pth,
|
| 26 |
+
_find_namespaces,
|
| 27 |
+
_find_package_roots,
|
| 28 |
+
_find_virtual_namespaces,
|
| 29 |
+
_finder_template,
|
| 30 |
+
_LinkTree,
|
| 31 |
+
_TopLevelFinder,
|
| 32 |
+
editable_wheel,
|
| 33 |
+
)
|
| 34 |
+
from setuptools.dist import Distribution
|
| 35 |
+
from setuptools.extension import Extension
|
| 36 |
+
from setuptools.warnings import SetuptoolsDeprecationWarning
|
| 37 |
+
|
| 38 |
+
from . import contexts, namespaces
|
| 39 |
+
|
| 40 |
+
from distutils.core import run_setup
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
@pytest.fixture(params=["strict", "lenient"])
|
| 44 |
+
def editable_opts(request):
|
| 45 |
+
if request.param == "strict":
|
| 46 |
+
return ["--config-settings", "editable-mode=strict"]
|
| 47 |
+
return []
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
EXAMPLE = {
|
| 51 |
+
'pyproject.toml': dedent(
|
| 52 |
+
"""\
|
| 53 |
+
[build-system]
|
| 54 |
+
requires = ["setuptools"]
|
| 55 |
+
build-backend = "setuptools.build_meta"
|
| 56 |
+
|
| 57 |
+
[project]
|
| 58 |
+
name = "mypkg"
|
| 59 |
+
version = "3.14159"
|
| 60 |
+
license = {text = "MIT"}
|
| 61 |
+
description = "This is a Python package"
|
| 62 |
+
dynamic = ["readme"]
|
| 63 |
+
classifiers = [
|
| 64 |
+
"Development Status :: 5 - Production/Stable",
|
| 65 |
+
"Intended Audience :: Developers"
|
| 66 |
+
]
|
| 67 |
+
urls = {Homepage = "https://github.com"}
|
| 68 |
+
|
| 69 |
+
[tool.setuptools]
|
| 70 |
+
package-dir = {"" = "src"}
|
| 71 |
+
packages = {find = {where = ["src"]}}
|
| 72 |
+
license-files = ["LICENSE*"]
|
| 73 |
+
|
| 74 |
+
[tool.setuptools.dynamic]
|
| 75 |
+
readme = {file = "README.rst"}
|
| 76 |
+
|
| 77 |
+
[tool.distutils.egg_info]
|
| 78 |
+
tag-build = ".post0"
|
| 79 |
+
"""
|
| 80 |
+
),
|
| 81 |
+
"MANIFEST.in": dedent(
|
| 82 |
+
"""\
|
| 83 |
+
global-include *.py *.txt
|
| 84 |
+
global-exclude *.py[cod]
|
| 85 |
+
prune dist
|
| 86 |
+
prune build
|
| 87 |
+
"""
|
| 88 |
+
).strip(),
|
| 89 |
+
"README.rst": "This is a ``README``",
|
| 90 |
+
"LICENSE.txt": "---- placeholder MIT license ----",
|
| 91 |
+
"src": {
|
| 92 |
+
"mypkg": {
|
| 93 |
+
"__init__.py": dedent(
|
| 94 |
+
"""\
|
| 95 |
+
import sys
|
| 96 |
+
from importlib.metadata import PackageNotFoundError, version
|
| 97 |
+
|
| 98 |
+
try:
|
| 99 |
+
__version__ = version(__name__)
|
| 100 |
+
except PackageNotFoundError:
|
| 101 |
+
__version__ = "unknown"
|
| 102 |
+
"""
|
| 103 |
+
),
|
| 104 |
+
"__main__.py": dedent(
|
| 105 |
+
"""\
|
| 106 |
+
from importlib.resources import read_text
|
| 107 |
+
from . import __version__, __name__ as parent
|
| 108 |
+
from .mod import x
|
| 109 |
+
|
| 110 |
+
data = read_text(parent, "data.txt")
|
| 111 |
+
print(__version__, data, x)
|
| 112 |
+
"""
|
| 113 |
+
),
|
| 114 |
+
"mod.py": "x = ''",
|
| 115 |
+
"data.txt": "Hello World",
|
| 116 |
+
}
|
| 117 |
+
},
|
| 118 |
+
}
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
SETUP_SCRIPT_STUB = "__import__('setuptools').setup()"
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
@pytest.mark.xfail(sys.platform == "darwin", reason="pypa/setuptools#4328")
|
| 125 |
+
@pytest.mark.parametrize(
|
| 126 |
+
"files",
|
| 127 |
+
[
|
| 128 |
+
{**EXAMPLE, "setup.py": SETUP_SCRIPT_STUB},
|
| 129 |
+
EXAMPLE, # No setup.py script
|
| 130 |
+
],
|
| 131 |
+
)
|
| 132 |
+
def test_editable_with_pyproject(tmp_path, venv, files, editable_opts):
|
| 133 |
+
project = tmp_path / "mypkg"
|
| 134 |
+
project.mkdir()
|
| 135 |
+
jaraco.path.build(files, prefix=project)
|
| 136 |
+
|
| 137 |
+
cmd = [
|
| 138 |
+
"python",
|
| 139 |
+
"-m",
|
| 140 |
+
"pip",
|
| 141 |
+
"install",
|
| 142 |
+
"--no-build-isolation", # required to force current version of setuptools
|
| 143 |
+
"-e",
|
| 144 |
+
str(project),
|
| 145 |
+
*editable_opts,
|
| 146 |
+
]
|
| 147 |
+
print(venv.run(cmd))
|
| 148 |
+
|
| 149 |
+
cmd = ["python", "-m", "mypkg"]
|
| 150 |
+
assert venv.run(cmd).strip() == "3.14159.post0 Hello World"
|
| 151 |
+
|
| 152 |
+
(project / "src/mypkg/data.txt").write_text("foobar", encoding="utf-8")
|
| 153 |
+
(project / "src/mypkg/mod.py").write_text("x = 42", encoding="utf-8")
|
| 154 |
+
assert venv.run(cmd).strip() == "3.14159.post0 foobar 42"
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
def test_editable_with_flat_layout(tmp_path, venv, editable_opts):
|
| 158 |
+
files = {
|
| 159 |
+
"mypkg": {
|
| 160 |
+
"pyproject.toml": dedent(
|
| 161 |
+
"""\
|
| 162 |
+
[build-system]
|
| 163 |
+
requires = ["setuptools", "wheel"]
|
| 164 |
+
build-backend = "setuptools.build_meta"
|
| 165 |
+
|
| 166 |
+
[project]
|
| 167 |
+
name = "mypkg"
|
| 168 |
+
version = "3.14159"
|
| 169 |
+
|
| 170 |
+
[tool.setuptools]
|
| 171 |
+
packages = ["pkg"]
|
| 172 |
+
py-modules = ["mod"]
|
| 173 |
+
"""
|
| 174 |
+
),
|
| 175 |
+
"pkg": {"__init__.py": "a = 4"},
|
| 176 |
+
"mod.py": "b = 2",
|
| 177 |
+
},
|
| 178 |
+
}
|
| 179 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 180 |
+
project = tmp_path / "mypkg"
|
| 181 |
+
|
| 182 |
+
cmd = [
|
| 183 |
+
"python",
|
| 184 |
+
"-m",
|
| 185 |
+
"pip",
|
| 186 |
+
"install",
|
| 187 |
+
"--no-build-isolation", # required to force current version of setuptools
|
| 188 |
+
"-e",
|
| 189 |
+
str(project),
|
| 190 |
+
*editable_opts,
|
| 191 |
+
]
|
| 192 |
+
print(venv.run(cmd))
|
| 193 |
+
cmd = ["python", "-c", "import pkg, mod; print(pkg.a, mod.b)"]
|
| 194 |
+
assert venv.run(cmd).strip() == "4 2"
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def test_editable_with_single_module(tmp_path, venv, editable_opts):
|
| 198 |
+
files = {
|
| 199 |
+
"mypkg": {
|
| 200 |
+
"pyproject.toml": dedent(
|
| 201 |
+
"""\
|
| 202 |
+
[build-system]
|
| 203 |
+
requires = ["setuptools", "wheel"]
|
| 204 |
+
build-backend = "setuptools.build_meta"
|
| 205 |
+
|
| 206 |
+
[project]
|
| 207 |
+
name = "mod"
|
| 208 |
+
version = "3.14159"
|
| 209 |
+
|
| 210 |
+
[tool.setuptools]
|
| 211 |
+
py-modules = ["mod"]
|
| 212 |
+
"""
|
| 213 |
+
),
|
| 214 |
+
"mod.py": "b = 2",
|
| 215 |
+
},
|
| 216 |
+
}
|
| 217 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 218 |
+
project = tmp_path / "mypkg"
|
| 219 |
+
|
| 220 |
+
cmd = [
|
| 221 |
+
"python",
|
| 222 |
+
"-m",
|
| 223 |
+
"pip",
|
| 224 |
+
"install",
|
| 225 |
+
"--no-build-isolation", # required to force current version of setuptools
|
| 226 |
+
"-e",
|
| 227 |
+
str(project),
|
| 228 |
+
*editable_opts,
|
| 229 |
+
]
|
| 230 |
+
print(venv.run(cmd))
|
| 231 |
+
cmd = ["python", "-c", "import mod; print(mod.b)"]
|
| 232 |
+
assert venv.run(cmd).strip() == "2"
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
class TestLegacyNamespaces:
|
| 236 |
+
# legacy => pkg_resources.declare_namespace(...) + setup(namespace_packages=...)
|
| 237 |
+
|
| 238 |
+
def test_nspkg_file_is_unique(self, tmp_path, monkeypatch):
|
| 239 |
+
deprecation = pytest.warns(
|
| 240 |
+
SetuptoolsDeprecationWarning, match=".*namespace_packages parameter.*"
|
| 241 |
+
)
|
| 242 |
+
installation_dir = tmp_path / ".installation_dir"
|
| 243 |
+
installation_dir.mkdir()
|
| 244 |
+
examples = (
|
| 245 |
+
"myns.pkgA",
|
| 246 |
+
"myns.pkgB",
|
| 247 |
+
"myns.n.pkgA",
|
| 248 |
+
"myns.n.pkgB",
|
| 249 |
+
)
|
| 250 |
+
|
| 251 |
+
for name in examples:
|
| 252 |
+
pkg = namespaces.build_namespace_package(tmp_path, name, version="42")
|
| 253 |
+
with deprecation, monkeypatch.context() as ctx:
|
| 254 |
+
ctx.chdir(pkg)
|
| 255 |
+
dist = run_setup("setup.py", stop_after="config")
|
| 256 |
+
cmd = editable_wheel(dist)
|
| 257 |
+
cmd.finalize_options()
|
| 258 |
+
editable_name = cmd.get_finalized_command("dist_info").name
|
| 259 |
+
cmd._install_namespaces(installation_dir, editable_name)
|
| 260 |
+
|
| 261 |
+
files = list(installation_dir.glob("*-nspkg.pth"))
|
| 262 |
+
assert len(files) == len(examples)
|
| 263 |
+
|
| 264 |
+
@pytest.mark.parametrize(
|
| 265 |
+
"impl",
|
| 266 |
+
(
|
| 267 |
+
"pkg_resources",
|
| 268 |
+
# "pkgutil", => does not work
|
| 269 |
+
),
|
| 270 |
+
)
|
| 271 |
+
@pytest.mark.parametrize("ns", ("myns.n",))
|
| 272 |
+
def test_namespace_package_importable(
|
| 273 |
+
self, venv, tmp_path, ns, impl, editable_opts
|
| 274 |
+
):
|
| 275 |
+
"""
|
| 276 |
+
Installing two packages sharing the same namespace, one installed
|
| 277 |
+
naturally using pip or `--single-version-externally-managed`
|
| 278 |
+
and the other installed in editable mode should leave the namespace
|
| 279 |
+
intact and both packages reachable by import.
|
| 280 |
+
(Ported from test_develop).
|
| 281 |
+
"""
|
| 282 |
+
build_system = """\
|
| 283 |
+
[build-system]
|
| 284 |
+
requires = ["setuptools"]
|
| 285 |
+
build-backend = "setuptools.build_meta"
|
| 286 |
+
"""
|
| 287 |
+
pkg_A = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgA", impl=impl)
|
| 288 |
+
pkg_B = namespaces.build_namespace_package(tmp_path, f"{ns}.pkgB", impl=impl)
|
| 289 |
+
(pkg_A / "pyproject.toml").write_text(build_system, encoding="utf-8")
|
| 290 |
+
(pkg_B / "pyproject.toml").write_text(build_system, encoding="utf-8")
|
| 291 |
+
# use pip to install to the target directory
|
| 292 |
+
opts = editable_opts[:]
|
| 293 |
+
opts.append("--no-build-isolation") # force current version of setuptools
|
| 294 |
+
venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
|
| 295 |
+
venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
|
| 296 |
+
venv.run(["python", "-c", f"import {ns}.pkgA; import {ns}.pkgB"])
|
| 297 |
+
# additionally ensure that pkg_resources import works
|
| 298 |
+
venv.run(["python", "-c", "import pkg_resources"])
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
class TestPep420Namespaces:
|
| 302 |
+
def test_namespace_package_importable(self, venv, tmp_path, editable_opts):
|
| 303 |
+
"""
|
| 304 |
+
Installing two packages sharing the same namespace, one installed
|
| 305 |
+
normally using pip and the other installed in editable mode
|
| 306 |
+
should allow importing both packages.
|
| 307 |
+
"""
|
| 308 |
+
pkg_A = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgA')
|
| 309 |
+
pkg_B = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgB')
|
| 310 |
+
# use pip to install to the target directory
|
| 311 |
+
opts = editable_opts[:]
|
| 312 |
+
opts.append("--no-build-isolation") # force current version of setuptools
|
| 313 |
+
venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
|
| 314 |
+
venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
|
| 315 |
+
venv.run(["python", "-c", "import myns.n.pkgA; import myns.n.pkgB"])
|
| 316 |
+
|
| 317 |
+
def test_namespace_created_via_package_dir(self, venv, tmp_path, editable_opts):
|
| 318 |
+
"""Currently users can create a namespace by tweaking `package_dir`"""
|
| 319 |
+
files = {
|
| 320 |
+
"pkgA": {
|
| 321 |
+
"pyproject.toml": dedent(
|
| 322 |
+
"""\
|
| 323 |
+
[build-system]
|
| 324 |
+
requires = ["setuptools", "wheel"]
|
| 325 |
+
build-backend = "setuptools.build_meta"
|
| 326 |
+
|
| 327 |
+
[project]
|
| 328 |
+
name = "pkgA"
|
| 329 |
+
version = "3.14159"
|
| 330 |
+
|
| 331 |
+
[tool.setuptools]
|
| 332 |
+
package-dir = {"myns.n.pkgA" = "src"}
|
| 333 |
+
"""
|
| 334 |
+
),
|
| 335 |
+
"src": {"__init__.py": "a = 1"},
|
| 336 |
+
},
|
| 337 |
+
}
|
| 338 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 339 |
+
pkg_A = tmp_path / "pkgA"
|
| 340 |
+
pkg_B = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgB')
|
| 341 |
+
pkg_C = namespaces.build_pep420_namespace_package(tmp_path, 'myns.n.pkgC')
|
| 342 |
+
|
| 343 |
+
# use pip to install to the target directory
|
| 344 |
+
opts = editable_opts[:]
|
| 345 |
+
opts.append("--no-build-isolation") # force current version of setuptools
|
| 346 |
+
venv.run(["python", "-m", "pip", "install", str(pkg_A), *opts])
|
| 347 |
+
venv.run(["python", "-m", "pip", "install", "-e", str(pkg_B), *opts])
|
| 348 |
+
venv.run(["python", "-m", "pip", "install", "-e", str(pkg_C), *opts])
|
| 349 |
+
venv.run(["python", "-c", "from myns.n import pkgA, pkgB, pkgC"])
|
| 350 |
+
|
| 351 |
+
def test_namespace_accidental_config_in_lenient_mode(self, venv, tmp_path):
|
| 352 |
+
"""Sometimes users might specify an ``include`` pattern that ignores parent
|
| 353 |
+
packages. In a normal installation this would ignore all modules inside the
|
| 354 |
+
parent packages, and make them namespaces (reported in issue #3504),
|
| 355 |
+
so the editable mode should preserve this behaviour.
|
| 356 |
+
"""
|
| 357 |
+
files = {
|
| 358 |
+
"pkgA": {
|
| 359 |
+
"pyproject.toml": dedent(
|
| 360 |
+
"""\
|
| 361 |
+
[build-system]
|
| 362 |
+
requires = ["setuptools", "wheel"]
|
| 363 |
+
build-backend = "setuptools.build_meta"
|
| 364 |
+
|
| 365 |
+
[project]
|
| 366 |
+
name = "pkgA"
|
| 367 |
+
version = "3.14159"
|
| 368 |
+
|
| 369 |
+
[tool.setuptools]
|
| 370 |
+
packages.find.include = ["mypkg.*"]
|
| 371 |
+
"""
|
| 372 |
+
),
|
| 373 |
+
"mypkg": {
|
| 374 |
+
"__init__.py": "",
|
| 375 |
+
"other.py": "b = 1",
|
| 376 |
+
"n": {
|
| 377 |
+
"__init__.py": "",
|
| 378 |
+
"pkgA.py": "a = 1",
|
| 379 |
+
},
|
| 380 |
+
},
|
| 381 |
+
"MANIFEST.in": EXAMPLE["MANIFEST.in"],
|
| 382 |
+
},
|
| 383 |
+
}
|
| 384 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 385 |
+
pkg_A = tmp_path / "pkgA"
|
| 386 |
+
|
| 387 |
+
# use pip to install to the target directory
|
| 388 |
+
opts = ["--no-build-isolation"] # force current version of setuptools
|
| 389 |
+
venv.run(["python", "-m", "pip", "-v", "install", "-e", str(pkg_A), *opts])
|
| 390 |
+
out = venv.run(["python", "-c", "from mypkg.n import pkgA; print(pkgA.a)"])
|
| 391 |
+
assert out.strip() == "1"
|
| 392 |
+
cmd = """\
|
| 393 |
+
try:
|
| 394 |
+
import mypkg.other
|
| 395 |
+
except ImportError:
|
| 396 |
+
print("mypkg.other not defined")
|
| 397 |
+
"""
|
| 398 |
+
out = venv.run(["python", "-c", dedent(cmd)])
|
| 399 |
+
assert "mypkg.other not defined" in out
|
| 400 |
+
|
| 401 |
+
|
| 402 |
+
def test_editable_with_prefix(tmp_path, sample_project, editable_opts):
|
| 403 |
+
"""
|
| 404 |
+
Editable install to a prefix should be discoverable.
|
| 405 |
+
"""
|
| 406 |
+
prefix = tmp_path / 'prefix'
|
| 407 |
+
|
| 408 |
+
# figure out where pip will likely install the package
|
| 409 |
+
site_packages_all = [
|
| 410 |
+
prefix / Path(path).relative_to(sys.prefix)
|
| 411 |
+
for path in sys.path
|
| 412 |
+
if 'site-packages' in path and path.startswith(sys.prefix)
|
| 413 |
+
]
|
| 414 |
+
|
| 415 |
+
for sp in site_packages_all:
|
| 416 |
+
sp.mkdir(parents=True)
|
| 417 |
+
|
| 418 |
+
# install workaround
|
| 419 |
+
_addsitedirs(site_packages_all)
|
| 420 |
+
|
| 421 |
+
env = dict(os.environ, PYTHONPATH=os.pathsep.join(map(str, site_packages_all)))
|
| 422 |
+
cmd = [
|
| 423 |
+
sys.executable,
|
| 424 |
+
'-m',
|
| 425 |
+
'pip',
|
| 426 |
+
'install',
|
| 427 |
+
'--editable',
|
| 428 |
+
str(sample_project),
|
| 429 |
+
'--prefix',
|
| 430 |
+
str(prefix),
|
| 431 |
+
'--no-build-isolation',
|
| 432 |
+
*editable_opts,
|
| 433 |
+
]
|
| 434 |
+
subprocess.check_call(cmd, env=env)
|
| 435 |
+
|
| 436 |
+
# now run 'sample' with the prefix on the PYTHONPATH
|
| 437 |
+
bin = 'Scripts' if platform.system() == 'Windows' else 'bin'
|
| 438 |
+
exe = prefix / bin / 'sample'
|
| 439 |
+
subprocess.check_call([exe], env=env)
|
| 440 |
+
|
| 441 |
+
|
| 442 |
+
class TestFinderTemplate:
|
| 443 |
+
"""This test focus in getting a particular implementation detail right.
|
| 444 |
+
If at some point in time the implementation is changed for something different,
|
| 445 |
+
this test can be modified or even excluded.
|
| 446 |
+
"""
|
| 447 |
+
|
| 448 |
+
def install_finder(self, finder):
|
| 449 |
+
loc = {}
|
| 450 |
+
exec(finder, loc, loc)
|
| 451 |
+
loc["install"]()
|
| 452 |
+
|
| 453 |
+
def test_packages(self, tmp_path):
|
| 454 |
+
files = {
|
| 455 |
+
"src1": {
|
| 456 |
+
"pkg1": {
|
| 457 |
+
"__init__.py": "",
|
| 458 |
+
"subpkg": {"mod1.py": "a = 42"},
|
| 459 |
+
},
|
| 460 |
+
},
|
| 461 |
+
"src2": {"mod2.py": "a = 43"},
|
| 462 |
+
}
|
| 463 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 464 |
+
|
| 465 |
+
mapping = {
|
| 466 |
+
"pkg1": str(tmp_path / "src1/pkg1"),
|
| 467 |
+
"mod2": str(tmp_path / "src2/mod2"),
|
| 468 |
+
}
|
| 469 |
+
template = _finder_template(str(uuid4()), mapping, {})
|
| 470 |
+
|
| 471 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 472 |
+
for mod in ("pkg1", "pkg1.subpkg", "pkg1.subpkg.mod1", "mod2"):
|
| 473 |
+
sys.modules.pop(mod, None)
|
| 474 |
+
|
| 475 |
+
self.install_finder(template)
|
| 476 |
+
mod1 = import_module("pkg1.subpkg.mod1")
|
| 477 |
+
mod2 = import_module("mod2")
|
| 478 |
+
subpkg = import_module("pkg1.subpkg")
|
| 479 |
+
|
| 480 |
+
assert mod1.a == 42
|
| 481 |
+
assert mod2.a == 43
|
| 482 |
+
expected = str((tmp_path / "src1/pkg1/subpkg").resolve())
|
| 483 |
+
assert_path(subpkg, expected)
|
| 484 |
+
|
| 485 |
+
def test_namespace(self, tmp_path):
|
| 486 |
+
files = {"pkg": {"__init__.py": "a = 13", "text.txt": "abc"}}
|
| 487 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 488 |
+
|
| 489 |
+
mapping = {"ns.othername": str(tmp_path / "pkg")}
|
| 490 |
+
namespaces = {"ns": []}
|
| 491 |
+
|
| 492 |
+
template = _finder_template(str(uuid4()), mapping, namespaces)
|
| 493 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 494 |
+
for mod in ("ns", "ns.othername"):
|
| 495 |
+
sys.modules.pop(mod, None)
|
| 496 |
+
|
| 497 |
+
self.install_finder(template)
|
| 498 |
+
pkg = import_module("ns.othername")
|
| 499 |
+
text = importlib_resources.files(pkg) / "text.txt"
|
| 500 |
+
|
| 501 |
+
expected = str((tmp_path / "pkg").resolve())
|
| 502 |
+
assert_path(pkg, expected)
|
| 503 |
+
assert pkg.a == 13
|
| 504 |
+
|
| 505 |
+
# Make sure resources can also be found
|
| 506 |
+
assert text.read_text(encoding="utf-8") == "abc"
|
| 507 |
+
|
| 508 |
+
def test_combine_namespaces(self, tmp_path):
|
| 509 |
+
files = {
|
| 510 |
+
"src1": {"ns": {"pkg1": {"__init__.py": "a = 13"}}},
|
| 511 |
+
"src2": {"ns": {"mod2.py": "b = 37"}},
|
| 512 |
+
}
|
| 513 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 514 |
+
|
| 515 |
+
mapping = {
|
| 516 |
+
"ns.pkgA": str(tmp_path / "src1/ns/pkg1"),
|
| 517 |
+
"ns": str(tmp_path / "src2/ns"),
|
| 518 |
+
}
|
| 519 |
+
namespaces_ = {"ns": [str(tmp_path / "src1"), str(tmp_path / "src2")]}
|
| 520 |
+
template = _finder_template(str(uuid4()), mapping, namespaces_)
|
| 521 |
+
|
| 522 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 523 |
+
for mod in ("ns", "ns.pkgA", "ns.mod2"):
|
| 524 |
+
sys.modules.pop(mod, None)
|
| 525 |
+
|
| 526 |
+
self.install_finder(template)
|
| 527 |
+
pkgA = import_module("ns.pkgA")
|
| 528 |
+
mod2 = import_module("ns.mod2")
|
| 529 |
+
|
| 530 |
+
expected = str((tmp_path / "src1/ns/pkg1").resolve())
|
| 531 |
+
assert_path(pkgA, expected)
|
| 532 |
+
assert pkgA.a == 13
|
| 533 |
+
assert mod2.b == 37
|
| 534 |
+
|
| 535 |
+
def test_combine_namespaces_nested(self, tmp_path):
|
| 536 |
+
"""
|
| 537 |
+
Users may attempt to combine namespace packages in a nested way via
|
| 538 |
+
``package_dir`` as shown in pypa/setuptools#4248.
|
| 539 |
+
"""
|
| 540 |
+
|
| 541 |
+
files = {
|
| 542 |
+
"src": {"my_package": {"my_module.py": "a = 13"}},
|
| 543 |
+
"src2": {"my_package2": {"my_module2.py": "b = 37"}},
|
| 544 |
+
}
|
| 545 |
+
|
| 546 |
+
stack = jaraco.path.DirectoryStack()
|
| 547 |
+
with stack.context(tmp_path):
|
| 548 |
+
jaraco.path.build(files)
|
| 549 |
+
attrs = {
|
| 550 |
+
"script_name": "%PEP 517%",
|
| 551 |
+
"package_dir": {
|
| 552 |
+
"different_name": "src/my_package",
|
| 553 |
+
"different_name.subpkg": "src2/my_package2",
|
| 554 |
+
},
|
| 555 |
+
"packages": ["different_name", "different_name.subpkg"],
|
| 556 |
+
}
|
| 557 |
+
dist = Distribution(attrs)
|
| 558 |
+
finder = _TopLevelFinder(dist, str(uuid4()))
|
| 559 |
+
code = next(v for k, v in finder.get_implementation() if k.endswith(".py"))
|
| 560 |
+
|
| 561 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 562 |
+
for mod in attrs["packages"]:
|
| 563 |
+
sys.modules.pop(mod, None)
|
| 564 |
+
|
| 565 |
+
self.install_finder(code)
|
| 566 |
+
mod1 = import_module("different_name.my_module")
|
| 567 |
+
mod2 = import_module("different_name.subpkg.my_module2")
|
| 568 |
+
|
| 569 |
+
expected = str((tmp_path / "src/my_package/my_module.py").resolve())
|
| 570 |
+
assert str(Path(mod1.__file__).resolve()) == expected
|
| 571 |
+
|
| 572 |
+
expected = str((tmp_path / "src2/my_package2/my_module2.py").resolve())
|
| 573 |
+
assert str(Path(mod2.__file__).resolve()) == expected
|
| 574 |
+
|
| 575 |
+
assert mod1.a == 13
|
| 576 |
+
assert mod2.b == 37
|
| 577 |
+
|
| 578 |
+
def test_dynamic_path_computation(self, tmp_path):
|
| 579 |
+
# Follows the example in PEP 420
|
| 580 |
+
files = {
|
| 581 |
+
"project1": {"parent": {"child": {"one.py": "x = 1"}}},
|
| 582 |
+
"project2": {"parent": {"child": {"two.py": "x = 2"}}},
|
| 583 |
+
"project3": {"parent": {"child": {"three.py": "x = 3"}}},
|
| 584 |
+
}
|
| 585 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 586 |
+
mapping = {}
|
| 587 |
+
namespaces_ = {"parent": [str(tmp_path / "project1/parent")]}
|
| 588 |
+
template = _finder_template(str(uuid4()), mapping, namespaces_)
|
| 589 |
+
|
| 590 |
+
mods = (f"parent.child.{name}" for name in ("one", "two", "three"))
|
| 591 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 592 |
+
for mod in ("parent", "parent.child", "parent.child", *mods):
|
| 593 |
+
sys.modules.pop(mod, None)
|
| 594 |
+
|
| 595 |
+
self.install_finder(template)
|
| 596 |
+
|
| 597 |
+
one = import_module("parent.child.one")
|
| 598 |
+
assert one.x == 1
|
| 599 |
+
|
| 600 |
+
with pytest.raises(ImportError):
|
| 601 |
+
import_module("parent.child.two")
|
| 602 |
+
|
| 603 |
+
sys.path.append(str(tmp_path / "project2"))
|
| 604 |
+
two = import_module("parent.child.two")
|
| 605 |
+
assert two.x == 2
|
| 606 |
+
|
| 607 |
+
with pytest.raises(ImportError):
|
| 608 |
+
import_module("parent.child.three")
|
| 609 |
+
|
| 610 |
+
sys.path.append(str(tmp_path / "project3"))
|
| 611 |
+
three = import_module("parent.child.three")
|
| 612 |
+
assert three.x == 3
|
| 613 |
+
|
| 614 |
+
def test_no_recursion(self, tmp_path):
|
| 615 |
+
# See issue #3550
|
| 616 |
+
files = {
|
| 617 |
+
"pkg": {
|
| 618 |
+
"__init__.py": "from . import pkg",
|
| 619 |
+
},
|
| 620 |
+
}
|
| 621 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 622 |
+
|
| 623 |
+
mapping = {
|
| 624 |
+
"pkg": str(tmp_path / "pkg"),
|
| 625 |
+
}
|
| 626 |
+
template = _finder_template(str(uuid4()), mapping, {})
|
| 627 |
+
|
| 628 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 629 |
+
sys.modules.pop("pkg", None)
|
| 630 |
+
|
| 631 |
+
self.install_finder(template)
|
| 632 |
+
with pytest.raises(ImportError, match="pkg"):
|
| 633 |
+
import_module("pkg")
|
| 634 |
+
|
| 635 |
+
def test_similar_name(self, tmp_path):
|
| 636 |
+
files = {
|
| 637 |
+
"foo": {
|
| 638 |
+
"__init__.py": "",
|
| 639 |
+
"bar": {
|
| 640 |
+
"__init__.py": "",
|
| 641 |
+
},
|
| 642 |
+
},
|
| 643 |
+
}
|
| 644 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 645 |
+
|
| 646 |
+
mapping = {
|
| 647 |
+
"foo": str(tmp_path / "foo"),
|
| 648 |
+
}
|
| 649 |
+
template = _finder_template(str(uuid4()), mapping, {})
|
| 650 |
+
|
| 651 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 652 |
+
sys.modules.pop("foo", None)
|
| 653 |
+
sys.modules.pop("foo.bar", None)
|
| 654 |
+
|
| 655 |
+
self.install_finder(template)
|
| 656 |
+
with pytest.raises(ImportError, match="foobar"):
|
| 657 |
+
import_module("foobar")
|
| 658 |
+
|
| 659 |
+
def test_case_sensitivity(self, tmp_path):
|
| 660 |
+
files = {
|
| 661 |
+
"foo": {
|
| 662 |
+
"__init__.py": "",
|
| 663 |
+
"lowercase.py": "x = 1",
|
| 664 |
+
"bar": {
|
| 665 |
+
"__init__.py": "",
|
| 666 |
+
"lowercase.py": "x = 2",
|
| 667 |
+
},
|
| 668 |
+
},
|
| 669 |
+
}
|
| 670 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 671 |
+
mapping = {
|
| 672 |
+
"foo": str(tmp_path / "foo"),
|
| 673 |
+
}
|
| 674 |
+
template = _finder_template(str(uuid4()), mapping, {})
|
| 675 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 676 |
+
sys.modules.pop("foo", None)
|
| 677 |
+
|
| 678 |
+
self.install_finder(template)
|
| 679 |
+
with pytest.raises(ImportError, match="'FOO'"):
|
| 680 |
+
import_module("FOO")
|
| 681 |
+
|
| 682 |
+
with pytest.raises(ImportError, match="'foo\\.LOWERCASE'"):
|
| 683 |
+
import_module("foo.LOWERCASE")
|
| 684 |
+
|
| 685 |
+
with pytest.raises(ImportError, match="'foo\\.bar\\.Lowercase'"):
|
| 686 |
+
import_module("foo.bar.Lowercase")
|
| 687 |
+
|
| 688 |
+
with pytest.raises(ImportError, match="'foo\\.BAR'"):
|
| 689 |
+
import_module("foo.BAR.lowercase")
|
| 690 |
+
|
| 691 |
+
with pytest.raises(ImportError, match="'FOO'"):
|
| 692 |
+
import_module("FOO.bar.lowercase")
|
| 693 |
+
|
| 694 |
+
mod = import_module("foo.lowercase")
|
| 695 |
+
assert mod.x == 1
|
| 696 |
+
|
| 697 |
+
mod = import_module("foo.bar.lowercase")
|
| 698 |
+
assert mod.x == 2
|
| 699 |
+
|
| 700 |
+
def test_namespace_case_sensitivity(self, tmp_path):
|
| 701 |
+
files = {
|
| 702 |
+
"pkg": {
|
| 703 |
+
"__init__.py": "a = 13",
|
| 704 |
+
"foo": {
|
| 705 |
+
"__init__.py": "b = 37",
|
| 706 |
+
"bar.py": "c = 42",
|
| 707 |
+
},
|
| 708 |
+
},
|
| 709 |
+
}
|
| 710 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 711 |
+
|
| 712 |
+
mapping = {"ns.othername": str(tmp_path / "pkg")}
|
| 713 |
+
namespaces = {"ns": []}
|
| 714 |
+
|
| 715 |
+
template = _finder_template(str(uuid4()), mapping, namespaces)
|
| 716 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 717 |
+
for mod in ("ns", "ns.othername"):
|
| 718 |
+
sys.modules.pop(mod, None)
|
| 719 |
+
|
| 720 |
+
self.install_finder(template)
|
| 721 |
+
pkg = import_module("ns.othername")
|
| 722 |
+
expected = str((tmp_path / "pkg").resolve())
|
| 723 |
+
assert_path(pkg, expected)
|
| 724 |
+
assert pkg.a == 13
|
| 725 |
+
|
| 726 |
+
foo = import_module("ns.othername.foo")
|
| 727 |
+
assert foo.b == 37
|
| 728 |
+
|
| 729 |
+
bar = import_module("ns.othername.foo.bar")
|
| 730 |
+
assert bar.c == 42
|
| 731 |
+
|
| 732 |
+
with pytest.raises(ImportError, match="'NS'"):
|
| 733 |
+
import_module("NS.othername.foo")
|
| 734 |
+
|
| 735 |
+
with pytest.raises(ImportError, match="'ns\\.othername\\.FOO\\'"):
|
| 736 |
+
import_module("ns.othername.FOO")
|
| 737 |
+
|
| 738 |
+
with pytest.raises(ImportError, match="'ns\\.othername\\.foo\\.BAR\\'"):
|
| 739 |
+
import_module("ns.othername.foo.BAR")
|
| 740 |
+
|
| 741 |
+
def test_intermediate_packages(self, tmp_path):
|
| 742 |
+
"""
|
| 743 |
+
The finder should not import ``fullname`` if the intermediate segments
|
| 744 |
+
don't exist (see pypa/setuptools#4019).
|
| 745 |
+
"""
|
| 746 |
+
files = {
|
| 747 |
+
"src": {
|
| 748 |
+
"mypkg": {
|
| 749 |
+
"__init__.py": "",
|
| 750 |
+
"config.py": "a = 13",
|
| 751 |
+
"helloworld.py": "b = 13",
|
| 752 |
+
"components": {
|
| 753 |
+
"config.py": "a = 37",
|
| 754 |
+
},
|
| 755 |
+
},
|
| 756 |
+
}
|
| 757 |
+
}
|
| 758 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 759 |
+
|
| 760 |
+
mapping = {"mypkg": str(tmp_path / "src/mypkg")}
|
| 761 |
+
template = _finder_template(str(uuid4()), mapping, {})
|
| 762 |
+
|
| 763 |
+
with contexts.save_paths(), contexts.save_sys_modules():
|
| 764 |
+
for mod in (
|
| 765 |
+
"mypkg",
|
| 766 |
+
"mypkg.config",
|
| 767 |
+
"mypkg.helloworld",
|
| 768 |
+
"mypkg.components",
|
| 769 |
+
"mypkg.components.config",
|
| 770 |
+
"mypkg.components.helloworld",
|
| 771 |
+
):
|
| 772 |
+
sys.modules.pop(mod, None)
|
| 773 |
+
|
| 774 |
+
self.install_finder(template)
|
| 775 |
+
|
| 776 |
+
config = import_module("mypkg.components.config")
|
| 777 |
+
assert config.a == 37
|
| 778 |
+
|
| 779 |
+
helloworld = import_module("mypkg.helloworld")
|
| 780 |
+
assert helloworld.b == 13
|
| 781 |
+
|
| 782 |
+
with pytest.raises(ImportError):
|
| 783 |
+
import_module("mypkg.components.helloworld")
|
| 784 |
+
|
| 785 |
+
|
| 786 |
+
def test_pkg_roots(tmp_path):
|
| 787 |
+
"""This test focus in getting a particular implementation detail right.
|
| 788 |
+
If at some point in time the implementation is changed for something different,
|
| 789 |
+
this test can be modified or even excluded.
|
| 790 |
+
"""
|
| 791 |
+
files = {
|
| 792 |
+
"a": {"b": {"__init__.py": "ab = 1"}, "__init__.py": "a = 1"},
|
| 793 |
+
"d": {"__init__.py": "d = 1", "e": {"__init__.py": "de = 1"}},
|
| 794 |
+
"f": {"g": {"h": {"__init__.py": "fgh = 1"}}},
|
| 795 |
+
"other": {"__init__.py": "abc = 1"},
|
| 796 |
+
"another": {"__init__.py": "abcxyz = 1"},
|
| 797 |
+
"yet_another": {"__init__.py": "mnopq = 1"},
|
| 798 |
+
}
|
| 799 |
+
jaraco.path.build(files, prefix=tmp_path)
|
| 800 |
+
package_dir = {
|
| 801 |
+
"a.b.c": "other",
|
| 802 |
+
"a.b.c.x.y.z": "another",
|
| 803 |
+
"m.n.o.p.q": "yet_another",
|
| 804 |
+
}
|
| 805 |
+
packages = [
|
| 806 |
+
"a",
|
| 807 |
+
"a.b",
|
| 808 |
+
"a.b.c",
|
| 809 |
+
"a.b.c.x.y",
|
| 810 |
+
"a.b.c.x.y.z",
|
| 811 |
+
"d",
|
| 812 |
+
"d.e",
|
| 813 |
+
"f",
|
| 814 |
+
"f.g",
|
| 815 |
+
"f.g.h",
|
| 816 |
+
"m.n.o.p.q",
|
| 817 |
+
]
|
| 818 |
+
roots = _find_package_roots(packages, package_dir, tmp_path)
|
| 819 |
+
assert roots == {
|
| 820 |
+
"a": str(tmp_path / "a"),
|
| 821 |
+
"a.b.c": str(tmp_path / "other"),
|
| 822 |
+
"a.b.c.x.y.z": str(tmp_path / "another"),
|
| 823 |
+
"d": str(tmp_path / "d"),
|
| 824 |
+
"f": str(tmp_path / "f"),
|
| 825 |
+
"m.n.o.p.q": str(tmp_path / "yet_another"),
|
| 826 |
+
}
|
| 827 |
+
|
| 828 |
+
ns = set(dict(_find_namespaces(packages, roots)))
|
| 829 |
+
assert ns == {"f", "f.g"}
|
| 830 |
+
|
| 831 |
+
ns = set(_find_virtual_namespaces(roots))
|
| 832 |
+
assert ns == {"a.b", "a.b.c.x", "a.b.c.x.y", "m", "m.n", "m.n.o", "m.n.o.p"}
|
| 833 |
+
|
| 834 |
+
|
| 835 |
+
class TestOverallBehaviour:
|
| 836 |
+
PYPROJECT = """\
|
| 837 |
+
[build-system]
|
| 838 |
+
requires = ["setuptools"]
|
| 839 |
+
build-backend = "setuptools.build_meta"
|
| 840 |
+
|
| 841 |
+
[project]
|
| 842 |
+
name = "mypkg"
|
| 843 |
+
version = "3.14159"
|
| 844 |
+
"""
|
| 845 |
+
|
| 846 |
+
# Any: Would need a TypedDict. Keep it simple for tests
|
| 847 |
+
FLAT_LAYOUT: dict[str, Any] = {
|
| 848 |
+
"pyproject.toml": dedent(PYPROJECT),
|
| 849 |
+
"MANIFEST.in": EXAMPLE["MANIFEST.in"],
|
| 850 |
+
"otherfile.py": "",
|
| 851 |
+
"mypkg": {
|
| 852 |
+
"__init__.py": "",
|
| 853 |
+
"mod1.py": "var = 42",
|
| 854 |
+
"subpackage": {
|
| 855 |
+
"__init__.py": "",
|
| 856 |
+
"mod2.py": "var = 13",
|
| 857 |
+
"resource_file.txt": "resource 39",
|
| 858 |
+
},
|
| 859 |
+
},
|
| 860 |
+
}
|
| 861 |
+
|
| 862 |
+
EXAMPLES = {
|
| 863 |
+
"flat-layout": FLAT_LAYOUT,
|
| 864 |
+
"src-layout": {
|
| 865 |
+
"pyproject.toml": dedent(PYPROJECT),
|
| 866 |
+
"MANIFEST.in": EXAMPLE["MANIFEST.in"],
|
| 867 |
+
"otherfile.py": "",
|
| 868 |
+
"src": {"mypkg": FLAT_LAYOUT["mypkg"]},
|
| 869 |
+
},
|
| 870 |
+
"custom-layout": {
|
| 871 |
+
"pyproject.toml": dedent(PYPROJECT)
|
| 872 |
+
+ dedent(
|
| 873 |
+
"""\
|
| 874 |
+
[tool.setuptools]
|
| 875 |
+
packages = ["mypkg", "mypkg.subpackage"]
|
| 876 |
+
|
| 877 |
+
[tool.setuptools.package-dir]
|
| 878 |
+
"mypkg.subpackage" = "other"
|
| 879 |
+
"""
|
| 880 |
+
),
|
| 881 |
+
"MANIFEST.in": EXAMPLE["MANIFEST.in"],
|
| 882 |
+
"otherfile.py": "",
|
| 883 |
+
"mypkg": {
|
| 884 |
+
"__init__.py": "",
|
| 885 |
+
"mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"],
|
| 886 |
+
},
|
| 887 |
+
"other": FLAT_LAYOUT["mypkg"]["subpackage"],
|
| 888 |
+
},
|
| 889 |
+
"namespace": {
|
| 890 |
+
"pyproject.toml": dedent(PYPROJECT),
|
| 891 |
+
"MANIFEST.in": EXAMPLE["MANIFEST.in"],
|
| 892 |
+
"otherfile.py": "",
|
| 893 |
+
"src": {
|
| 894 |
+
"mypkg": {
|
| 895 |
+
"mod1.py": FLAT_LAYOUT["mypkg"]["mod1.py"],
|
| 896 |
+
"subpackage": FLAT_LAYOUT["mypkg"]["subpackage"],
|
| 897 |
+
},
|
| 898 |
+
},
|
| 899 |
+
},
|
| 900 |
+
}
|
| 901 |
+
|
| 902 |
+
@pytest.mark.xfail(sys.platform == "darwin", reason="pypa/setuptools#4328")
|
| 903 |
+
@pytest.mark.parametrize("layout", EXAMPLES.keys())
|
| 904 |
+
def test_editable_install(self, tmp_path, venv, layout, editable_opts):
|
| 905 |
+
project, _ = install_project(
|
| 906 |
+
"mypkg", venv, tmp_path, self.EXAMPLES[layout], *editable_opts
|
| 907 |
+
)
|
| 908 |
+
|
| 909 |
+
# Ensure stray files are not importable
|
| 910 |
+
cmd_import_error = """\
|
| 911 |
+
try:
|
| 912 |
+
import otherfile
|
| 913 |
+
except ImportError as ex:
|
| 914 |
+
print(ex)
|
| 915 |
+
"""
|
| 916 |
+
out = venv.run(["python", "-c", dedent(cmd_import_error)])
|
| 917 |
+
assert "No module named 'otherfile'" in out
|
| 918 |
+
|
| 919 |
+
# Ensure the modules are importable
|
| 920 |
+
cmd_get_vars = """\
|
| 921 |
+
import mypkg, mypkg.mod1, mypkg.subpackage.mod2
|
| 922 |
+
print(mypkg.mod1.var, mypkg.subpackage.mod2.var)
|
| 923 |
+
"""
|
| 924 |
+
out = venv.run(["python", "-c", dedent(cmd_get_vars)])
|
| 925 |
+
assert "42 13" in out
|
| 926 |
+
|
| 927 |
+
# Ensure resources are reachable
|
| 928 |
+
cmd_get_resource = """\
|
| 929 |
+
import mypkg.subpackage
|
| 930 |
+
from setuptools._importlib import resources as importlib_resources
|
| 931 |
+
text = importlib_resources.files(mypkg.subpackage) / "resource_file.txt"
|
| 932 |
+
print(text.read_text(encoding="utf-8"))
|
| 933 |
+
"""
|
| 934 |
+
out = venv.run(["python", "-c", dedent(cmd_get_resource)])
|
| 935 |
+
assert "resource 39" in out
|
| 936 |
+
|
| 937 |
+
# Ensure files are editable
|
| 938 |
+
mod1 = next(project.glob("**/mod1.py"))
|
| 939 |
+
mod2 = next(project.glob("**/mod2.py"))
|
| 940 |
+
resource_file = next(project.glob("**/resource_file.txt"))
|
| 941 |
+
|
| 942 |
+
mod1.write_text("var = 17", encoding="utf-8")
|
| 943 |
+
mod2.write_text("var = 781", encoding="utf-8")
|
| 944 |
+
resource_file.write_text("resource 374", encoding="utf-8")
|
| 945 |
+
|
| 946 |
+
out = venv.run(["python", "-c", dedent(cmd_get_vars)])
|
| 947 |
+
assert "42 13" not in out
|
| 948 |
+
assert "17 781" in out
|
| 949 |
+
|
| 950 |
+
out = venv.run(["python", "-c", dedent(cmd_get_resource)])
|
| 951 |
+
assert "resource 39" not in out
|
| 952 |
+
assert "resource 374" in out
|
| 953 |
+
|
| 954 |
+
|
| 955 |
+
class TestLinkTree:
|
| 956 |
+
FILES = deepcopy(TestOverallBehaviour.EXAMPLES["src-layout"])
|
| 957 |
+
FILES["pyproject.toml"] += dedent(
|
| 958 |
+
"""\
|
| 959 |
+
[tool.setuptools]
|
| 960 |
+
# Temporary workaround: both `include-package-data` and `package-data` configs
|
| 961 |
+
# can be removed after #3260 is fixed.
|
| 962 |
+
include-package-data = false
|
| 963 |
+
package-data = {"*" = ["*.txt"]}
|
| 964 |
+
|
| 965 |
+
[tool.setuptools.packages.find]
|
| 966 |
+
where = ["src"]
|
| 967 |
+
exclude = ["*.subpackage*"]
|
| 968 |
+
"""
|
| 969 |
+
)
|
| 970 |
+
FILES["src"]["mypkg"]["resource.not_in_manifest"] = "abc"
|
| 971 |
+
|
| 972 |
+
def test_generated_tree(self, tmp_path):
|
| 973 |
+
jaraco.path.build(self.FILES, prefix=tmp_path)
|
| 974 |
+
|
| 975 |
+
with _Path(tmp_path):
|
| 976 |
+
name = "mypkg-3.14159"
|
| 977 |
+
dist = Distribution({"script_name": "%PEP 517%"})
|
| 978 |
+
dist.parse_config_files()
|
| 979 |
+
|
| 980 |
+
wheel = Mock()
|
| 981 |
+
aux = tmp_path / ".aux"
|
| 982 |
+
build = tmp_path / ".build"
|
| 983 |
+
aux.mkdir()
|
| 984 |
+
build.mkdir()
|
| 985 |
+
|
| 986 |
+
build_py = dist.get_command_obj("build_py")
|
| 987 |
+
build_py.editable_mode = True
|
| 988 |
+
build_py.build_lib = str(build)
|
| 989 |
+
build_py.ensure_finalized()
|
| 990 |
+
outputs = build_py.get_outputs()
|
| 991 |
+
output_mapping = build_py.get_output_mapping()
|
| 992 |
+
|
| 993 |
+
make_tree = _LinkTree(dist, name, aux, build)
|
| 994 |
+
make_tree(wheel, outputs, output_mapping)
|
| 995 |
+
|
| 996 |
+
mod1 = next(aux.glob("**/mod1.py"))
|
| 997 |
+
expected = tmp_path / "src/mypkg/mod1.py"
|
| 998 |
+
assert_link_to(mod1, expected)
|
| 999 |
+
|
| 1000 |
+
assert next(aux.glob("**/subpackage"), None) is None
|
| 1001 |
+
assert next(aux.glob("**/mod2.py"), None) is None
|
| 1002 |
+
assert next(aux.glob("**/resource_file.txt"), None) is None
|
| 1003 |
+
|
| 1004 |
+
assert next(aux.glob("**/resource.not_in_manifest"), None) is None
|
| 1005 |
+
|
| 1006 |
+
def test_strict_install(self, tmp_path, venv):
|
| 1007 |
+
opts = ["--config-settings", "editable-mode=strict"]
|
| 1008 |
+
install_project("mypkg", venv, tmp_path, self.FILES, *opts)
|
| 1009 |
+
|
| 1010 |
+
out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
|
| 1011 |
+
assert "42" in out
|
| 1012 |
+
|
| 1013 |
+
# Ensure packages excluded from distribution are not importable
|
| 1014 |
+
cmd_import_error = """\
|
| 1015 |
+
try:
|
| 1016 |
+
from mypkg import subpackage
|
| 1017 |
+
except ImportError as ex:
|
| 1018 |
+
print(ex)
|
| 1019 |
+
"""
|
| 1020 |
+
out = venv.run(["python", "-c", dedent(cmd_import_error)])
|
| 1021 |
+
assert "cannot import name 'subpackage'" in out
|
| 1022 |
+
|
| 1023 |
+
# Ensure resource files excluded from distribution are not reachable
|
| 1024 |
+
cmd_get_resource = """\
|
| 1025 |
+
import mypkg
|
| 1026 |
+
from setuptools._importlib import resources as importlib_resources
|
| 1027 |
+
try:
|
| 1028 |
+
text = importlib_resources.files(mypkg) / "resource.not_in_manifest"
|
| 1029 |
+
print(text.read_text(encoding="utf-8"))
|
| 1030 |
+
except FileNotFoundError as ex:
|
| 1031 |
+
print(ex)
|
| 1032 |
+
"""
|
| 1033 |
+
out = venv.run(["python", "-c", dedent(cmd_get_resource)])
|
| 1034 |
+
assert "No such file or directory" in out
|
| 1035 |
+
assert "resource.not_in_manifest" in out
|
| 1036 |
+
|
| 1037 |
+
|
| 1038 |
+
@pytest.mark.filterwarnings("ignore:.*compat.*:setuptools.SetuptoolsDeprecationWarning")
|
| 1039 |
+
def test_compat_install(tmp_path, venv):
|
| 1040 |
+
# TODO: Remove `compat` after Dec/2022.
|
| 1041 |
+
opts = ["--config-settings", "editable-mode=compat"]
|
| 1042 |
+
files = TestOverallBehaviour.EXAMPLES["custom-layout"]
|
| 1043 |
+
install_project("mypkg", venv, tmp_path, files, *opts)
|
| 1044 |
+
|
| 1045 |
+
out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
|
| 1046 |
+
assert "42" in out
|
| 1047 |
+
|
| 1048 |
+
expected_path = comparable_path(str(tmp_path))
|
| 1049 |
+
|
| 1050 |
+
# Compatible behaviour will make spurious modules and excluded
|
| 1051 |
+
# files importable directly from the original path
|
| 1052 |
+
for cmd in (
|
| 1053 |
+
"import otherfile; print(otherfile)",
|
| 1054 |
+
"import other; print(other)",
|
| 1055 |
+
"import mypkg; print(mypkg)",
|
| 1056 |
+
):
|
| 1057 |
+
out = comparable_path(venv.run(["python", "-c", cmd]))
|
| 1058 |
+
assert expected_path in out
|
| 1059 |
+
|
| 1060 |
+
# Compatible behaviour will not consider custom mappings
|
| 1061 |
+
cmd = """\
|
| 1062 |
+
try:
|
| 1063 |
+
from mypkg import subpackage;
|
| 1064 |
+
except ImportError as ex:
|
| 1065 |
+
print(ex)
|
| 1066 |
+
"""
|
| 1067 |
+
out = venv.run(["python", "-c", dedent(cmd)])
|
| 1068 |
+
assert "cannot import name 'subpackage'" in out
|
| 1069 |
+
|
| 1070 |
+
|
| 1071 |
+
def test_pbr_integration(tmp_path, venv, editable_opts):
|
| 1072 |
+
"""Ensure editable installs work with pbr, issue #3500"""
|
| 1073 |
+
files = {
|
| 1074 |
+
"pyproject.toml": dedent(
|
| 1075 |
+
"""\
|
| 1076 |
+
[build-system]
|
| 1077 |
+
requires = ["setuptools"]
|
| 1078 |
+
build-backend = "setuptools.build_meta"
|
| 1079 |
+
"""
|
| 1080 |
+
),
|
| 1081 |
+
"setup.py": dedent(
|
| 1082 |
+
"""\
|
| 1083 |
+
__import__('setuptools').setup(
|
| 1084 |
+
pbr=True,
|
| 1085 |
+
setup_requires=["pbr"],
|
| 1086 |
+
)
|
| 1087 |
+
"""
|
| 1088 |
+
),
|
| 1089 |
+
"setup.cfg": dedent(
|
| 1090 |
+
"""\
|
| 1091 |
+
[metadata]
|
| 1092 |
+
name = mypkg
|
| 1093 |
+
|
| 1094 |
+
[files]
|
| 1095 |
+
packages =
|
| 1096 |
+
mypkg
|
| 1097 |
+
"""
|
| 1098 |
+
),
|
| 1099 |
+
"mypkg": {
|
| 1100 |
+
"__init__.py": "",
|
| 1101 |
+
"hello.py": "print('Hello world!')",
|
| 1102 |
+
},
|
| 1103 |
+
"other": {"test.txt": "Another file in here."},
|
| 1104 |
+
}
|
| 1105 |
+
venv.run(["python", "-m", "pip", "install", "pbr"])
|
| 1106 |
+
|
| 1107 |
+
with contexts.environment(PBR_VERSION="0.42"):
|
| 1108 |
+
install_project("mypkg", venv, tmp_path, files, *editable_opts)
|
| 1109 |
+
|
| 1110 |
+
out = venv.run(["python", "-c", "import mypkg.hello"])
|
| 1111 |
+
assert "Hello world!" in out
|
| 1112 |
+
|
| 1113 |
+
|
| 1114 |
+
class TestCustomBuildPy:
|
| 1115 |
+
"""
|
| 1116 |
+
Issue #3501 indicates that some plugins/customizations might rely on:
|
| 1117 |
+
|
| 1118 |
+
1. ``build_py`` not running
|
| 1119 |
+
2. ``build_py`` always copying files to ``build_lib``
|
| 1120 |
+
|
| 1121 |
+
During the transition period setuptools should prevent potential errors from
|
| 1122 |
+
happening due to those assumptions.
|
| 1123 |
+
"""
|
| 1124 |
+
|
| 1125 |
+
# TODO: Remove tests after _run_build_steps is removed.
|
| 1126 |
+
|
| 1127 |
+
FILES = {
|
| 1128 |
+
**TestOverallBehaviour.EXAMPLES["flat-layout"],
|
| 1129 |
+
"setup.py": dedent(
|
| 1130 |
+
"""\
|
| 1131 |
+
import pathlib
|
| 1132 |
+
from setuptools import setup
|
| 1133 |
+
from setuptools.command.build_py import build_py as orig
|
| 1134 |
+
|
| 1135 |
+
class my_build_py(orig):
|
| 1136 |
+
def run(self):
|
| 1137 |
+
super().run()
|
| 1138 |
+
raise ValueError("TEST_RAISE")
|
| 1139 |
+
|
| 1140 |
+
setup(cmdclass={"build_py": my_build_py})
|
| 1141 |
+
"""
|
| 1142 |
+
),
|
| 1143 |
+
}
|
| 1144 |
+
|
| 1145 |
+
def test_safeguarded_from_errors(self, tmp_path, venv):
|
| 1146 |
+
"""Ensure that errors in custom build_py are reported as warnings"""
|
| 1147 |
+
# Warnings should show up
|
| 1148 |
+
_, out = install_project("mypkg", venv, tmp_path, self.FILES)
|
| 1149 |
+
assert "SetuptoolsDeprecationWarning" in out
|
| 1150 |
+
assert "ValueError: TEST_RAISE" in out
|
| 1151 |
+
# but installation should be successful
|
| 1152 |
+
out = venv.run(["python", "-c", "import mypkg.mod1; print(mypkg.mod1.var)"])
|
| 1153 |
+
assert "42" in out
|
| 1154 |
+
|
| 1155 |
+
|
| 1156 |
+
class TestCustomBuildWheel:
|
| 1157 |
+
def install_custom_build_wheel(self, dist):
|
| 1158 |
+
bdist_wheel_cls = dist.get_command_class("bdist_wheel")
|
| 1159 |
+
|
| 1160 |
+
class MyBdistWheel(bdist_wheel_cls):
|
| 1161 |
+
def get_tag(self):
|
| 1162 |
+
# In issue #3513, we can see that some extensions may try to access
|
| 1163 |
+
# the `plat_name` property in bdist_wheel
|
| 1164 |
+
if self.plat_name.startswith("macosx-"):
|
| 1165 |
+
_ = "macOS platform"
|
| 1166 |
+
return super().get_tag()
|
| 1167 |
+
|
| 1168 |
+
dist.cmdclass["bdist_wheel"] = MyBdistWheel
|
| 1169 |
+
|
| 1170 |
+
def test_access_plat_name(self, tmpdir_cwd):
|
| 1171 |
+
# Even when a custom bdist_wheel tries to access plat_name the build should
|
| 1172 |
+
# be successful
|
| 1173 |
+
jaraco.path.build({"module.py": "x = 42"})
|
| 1174 |
+
dist = Distribution()
|
| 1175 |
+
dist.script_name = "setup.py"
|
| 1176 |
+
dist.set_defaults()
|
| 1177 |
+
self.install_custom_build_wheel(dist)
|
| 1178 |
+
cmd = editable_wheel(dist)
|
| 1179 |
+
cmd.ensure_finalized()
|
| 1180 |
+
cmd.run()
|
| 1181 |
+
wheel_file = str(next(Path().glob('dist/*.whl')))
|
| 1182 |
+
assert "editable" in wheel_file
|
| 1183 |
+
|
| 1184 |
+
|
| 1185 |
+
class TestCustomBuildExt:
|
| 1186 |
+
def install_custom_build_ext_distutils(self, dist):
|
| 1187 |
+
from distutils.command.build_ext import build_ext as build_ext_cls
|
| 1188 |
+
|
| 1189 |
+
class MyBuildExt(build_ext_cls):
|
| 1190 |
+
pass
|
| 1191 |
+
|
| 1192 |
+
dist.cmdclass["build_ext"] = MyBuildExt
|
| 1193 |
+
|
| 1194 |
+
@pytest.mark.skipif(
|
| 1195 |
+
sys.platform != "linux", reason="compilers may fail without correct setup"
|
| 1196 |
+
)
|
| 1197 |
+
def test_distutils_leave_inplace_files(self, tmpdir_cwd):
|
| 1198 |
+
jaraco.path.build({"module.c": ""})
|
| 1199 |
+
attrs = {
|
| 1200 |
+
"ext_modules": [Extension("module", ["module.c"])],
|
| 1201 |
+
}
|
| 1202 |
+
dist = Distribution(attrs)
|
| 1203 |
+
dist.script_name = "setup.py"
|
| 1204 |
+
dist.set_defaults()
|
| 1205 |
+
self.install_custom_build_ext_distutils(dist)
|
| 1206 |
+
cmd = editable_wheel(dist)
|
| 1207 |
+
cmd.ensure_finalized()
|
| 1208 |
+
cmd.run()
|
| 1209 |
+
wheel_file = str(next(Path().glob('dist/*.whl')))
|
| 1210 |
+
assert "editable" in wheel_file
|
| 1211 |
+
files = [p for p in Path().glob("module.*") if p.suffix != ".c"]
|
| 1212 |
+
assert len(files) == 1
|
| 1213 |
+
name = files[0].name
|
| 1214 |
+
assert any(name.endswith(ext) for ext in EXTENSION_SUFFIXES)
|
| 1215 |
+
|
| 1216 |
+
|
| 1217 |
+
def test_debugging_tips(tmpdir_cwd, monkeypatch):
|
| 1218 |
+
"""Make sure to display useful debugging tips to the user."""
|
| 1219 |
+
jaraco.path.build({"module.py": "x = 42"})
|
| 1220 |
+
dist = Distribution()
|
| 1221 |
+
dist.script_name = "setup.py"
|
| 1222 |
+
dist.set_defaults()
|
| 1223 |
+
cmd = editable_wheel(dist)
|
| 1224 |
+
cmd.ensure_finalized()
|
| 1225 |
+
|
| 1226 |
+
SimulatedErr = type("SimulatedErr", (Exception,), {})
|
| 1227 |
+
simulated_failure = Mock(side_effect=SimulatedErr())
|
| 1228 |
+
monkeypatch.setattr(cmd, "get_finalized_command", simulated_failure)
|
| 1229 |
+
|
| 1230 |
+
expected_msg = "following steps are recommended to help debug"
|
| 1231 |
+
with pytest.raises(SimulatedErr), pytest.warns(_DebuggingTips, match=expected_msg):
|
| 1232 |
+
cmd.run()
|
| 1233 |
+
|
| 1234 |
+
|
| 1235 |
+
@pytest.mark.filterwarnings("error")
|
| 1236 |
+
def test_encode_pth():
|
| 1237 |
+
"""Ensure _encode_pth function does not produce encoding warnings"""
|
| 1238 |
+
content = _encode_pth("tkmilan_ç_utf8") # no warnings (would be turned into errors)
|
| 1239 |
+
assert isinstance(content, bytes)
|
| 1240 |
+
|
| 1241 |
+
|
| 1242 |
+
def install_project(name, venv, tmp_path, files, *opts):
|
| 1243 |
+
project = tmp_path / name
|
| 1244 |
+
project.mkdir()
|
| 1245 |
+
jaraco.path.build(files, prefix=project)
|
| 1246 |
+
opts = [*opts, "--no-build-isolation"] # force current version of setuptools
|
| 1247 |
+
out = venv.run(
|
| 1248 |
+
["python", "-m", "pip", "-v", "install", "-e", str(project), *opts],
|
| 1249 |
+
stderr=subprocess.STDOUT,
|
| 1250 |
+
)
|
| 1251 |
+
return project, out
|
| 1252 |
+
|
| 1253 |
+
|
| 1254 |
+
def _addsitedirs(new_dirs):
|
| 1255 |
+
"""To use this function, it is necessary to insert new_dir in front of sys.path.
|
| 1256 |
+
The Python process will try to import a ``sitecustomize`` module on startup.
|
| 1257 |
+
If we manipulate sys.path/PYTHONPATH, we can force it to run our code,
|
| 1258 |
+
which invokes ``addsitedir`` and ensure ``.pth`` files are loaded.
|
| 1259 |
+
"""
|
| 1260 |
+
content = '\n'.join(
|
| 1261 |
+
("import site",)
|
| 1262 |
+
+ tuple(f"site.addsitedir({os.fspath(new_dir)!r})" for new_dir in new_dirs)
|
| 1263 |
+
)
|
| 1264 |
+
(new_dirs[0] / "sitecustomize.py").write_text(content, encoding="utf-8")
|
| 1265 |
+
|
| 1266 |
+
|
| 1267 |
+
# ---- Assertion Helpers ----
|
| 1268 |
+
|
| 1269 |
+
|
| 1270 |
+
def assert_path(pkg, expected):
    """Assert that *pkg*'s first ``__path__`` entry resolves to *expected*.

    ``__path__`` is not guaranteed to exist (or to be non-empty), so the
    check is silently skipped when it is absent or empty.
    """
    # Use getattr: plain (non-package) modules have no __path__ at all,
    # so a direct attribute access would raise AttributeError.
    pkg_path = getattr(pkg, "__path__", None)
    if pkg_path:
        path = next(iter(pkg_path), None)
        if path:
            assert str(Path(path).resolve()) == expected
|
| 1276 |
+
|
| 1277 |
+
|
| 1278 |
+
def assert_link_to(file: Path, other: Path) -> None:
    """Assert that *file* is a symlink or hard link to *other*."""
    if file.is_symlink():
        # Symlink: both paths must resolve to the same final target.
        assert str(file.resolve()) == str(other.resolve())
        return
    # Hard link: same inode on the same device.
    file_stat, other_stat = file.stat(), other.stat()
    assert file_stat[stat.ST_INO] == other_stat[stat.ST_INO]
    assert file_stat[stat.ST_DEV] == other_stat[stat.ST_DEV]
|
| 1286 |
+
|
| 1287 |
+
|
| 1288 |
+
def comparable_path(str_with_path: str) -> str:
    """Normalize a path string for comparison: lowercase, forward slashes,
    collapsed double slashes."""
    normalized = str_with_path.lower()
    normalized = normalized.replace(os.sep, "/")
    return normalized.replace("//", "/")
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_glob.py
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pytest
|
| 2 |
+
from jaraco import path
|
| 3 |
+
|
| 4 |
+
from setuptools.glob import glob
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
@pytest.mark.parametrize(
    ('tree', 'pattern', 'matches'),
    (
        ('', b'', []),
        ('', '', []),
        (
            """
            appveyor.yml
            CHANGES.rst
            LICENSE
            MANIFEST.in
            pyproject.toml
            README.rst
            setup.cfg
            setup.py
            """,
            '*.rst',
            ('CHANGES.rst', 'README.rst'),
        ),
        (
            """
            appveyor.yml
            CHANGES.rst
            LICENSE
            MANIFEST.in
            pyproject.toml
            README.rst
            setup.cfg
            setup.py
            """,
            b'*.rst',
            (b'CHANGES.rst', b'README.rst'),
        ),
    ),
)
def test_glob(monkeypatch, tmpdir, tree, pattern, matches):
    """setuptools.glob must handle str and bytes patterns (incl. empty ones)."""
    monkeypatch.chdir(tmpdir)
    # Materialize the whitespace-separated file listing as empty files.
    path.build({name: '' for name in tree.split()})
    # sorted() already returns a list; wrapping it in list() was redundant.
    assert sorted(glob(pattern)) == sorted(matches)
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/test_namespaces.py
ADDED
|
@@ -0,0 +1,138 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import subprocess
|
| 2 |
+
import sys
|
| 3 |
+
|
| 4 |
+
from setuptools._path import paths_on_pythonpath
|
| 5 |
+
|
| 6 |
+
from . import namespaces
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class TestNamespaces:
|
| 10 |
+
def test_mixed_site_and_non_site(self, tmpdir):
|
| 11 |
+
"""
|
| 12 |
+
Installing two packages sharing the same namespace, one installed
|
| 13 |
+
to a site dir and the other installed just to a path on PYTHONPATH
|
| 14 |
+
should leave the namespace in tact and both packages reachable by
|
| 15 |
+
import.
|
| 16 |
+
"""
|
| 17 |
+
pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
|
| 18 |
+
pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
|
| 19 |
+
site_packages = tmpdir / 'site-packages'
|
| 20 |
+
path_packages = tmpdir / 'path-packages'
|
| 21 |
+
targets = site_packages, path_packages
|
| 22 |
+
# use pip to install to the target directory
|
| 23 |
+
install_cmd = [
|
| 24 |
+
sys.executable,
|
| 25 |
+
'-m',
|
| 26 |
+
'pip.__main__',
|
| 27 |
+
'install',
|
| 28 |
+
str(pkg_A),
|
| 29 |
+
'-t',
|
| 30 |
+
str(site_packages),
|
| 31 |
+
]
|
| 32 |
+
subprocess.check_call(install_cmd)
|
| 33 |
+
namespaces.make_site_dir(site_packages)
|
| 34 |
+
install_cmd = [
|
| 35 |
+
sys.executable,
|
| 36 |
+
'-m',
|
| 37 |
+
'pip.__main__',
|
| 38 |
+
'install',
|
| 39 |
+
str(pkg_B),
|
| 40 |
+
'-t',
|
| 41 |
+
str(path_packages),
|
| 42 |
+
]
|
| 43 |
+
subprocess.check_call(install_cmd)
|
| 44 |
+
try_import = [
|
| 45 |
+
sys.executable,
|
| 46 |
+
'-c',
|
| 47 |
+
'import myns.pkgA; import myns.pkgB',
|
| 48 |
+
]
|
| 49 |
+
with paths_on_pythonpath(map(str, targets)):
|
| 50 |
+
subprocess.check_call(try_import)
|
| 51 |
+
|
| 52 |
+
def test_pkg_resources_import(self, tmpdir):
|
| 53 |
+
"""
|
| 54 |
+
Ensure that a namespace package doesn't break on import
|
| 55 |
+
of pkg_resources.
|
| 56 |
+
"""
|
| 57 |
+
pkg = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
|
| 58 |
+
target = tmpdir / 'packages'
|
| 59 |
+
target.mkdir()
|
| 60 |
+
install_cmd = [
|
| 61 |
+
sys.executable,
|
| 62 |
+
'-m',
|
| 63 |
+
'pip',
|
| 64 |
+
'install',
|
| 65 |
+
'-t',
|
| 66 |
+
str(target),
|
| 67 |
+
str(pkg),
|
| 68 |
+
]
|
| 69 |
+
with paths_on_pythonpath([str(target)]):
|
| 70 |
+
subprocess.check_call(install_cmd)
|
| 71 |
+
namespaces.make_site_dir(target)
|
| 72 |
+
try_import = [
|
| 73 |
+
sys.executable,
|
| 74 |
+
'-c',
|
| 75 |
+
'import pkg_resources',
|
| 76 |
+
]
|
| 77 |
+
with paths_on_pythonpath([str(target)]):
|
| 78 |
+
subprocess.check_call(try_import)
|
| 79 |
+
|
| 80 |
+
def test_namespace_package_installed_and_cwd(self, tmpdir):
|
| 81 |
+
"""
|
| 82 |
+
Installing a namespace packages but also having it in the current
|
| 83 |
+
working directory, only one version should take precedence.
|
| 84 |
+
"""
|
| 85 |
+
pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
|
| 86 |
+
target = tmpdir / 'packages'
|
| 87 |
+
# use pip to install to the target directory
|
| 88 |
+
install_cmd = [
|
| 89 |
+
sys.executable,
|
| 90 |
+
'-m',
|
| 91 |
+
'pip.__main__',
|
| 92 |
+
'install',
|
| 93 |
+
str(pkg_A),
|
| 94 |
+
'-t',
|
| 95 |
+
str(target),
|
| 96 |
+
]
|
| 97 |
+
subprocess.check_call(install_cmd)
|
| 98 |
+
namespaces.make_site_dir(target)
|
| 99 |
+
|
| 100 |
+
# ensure that package imports and pkg_resources imports
|
| 101 |
+
pkg_resources_imp = [
|
| 102 |
+
sys.executable,
|
| 103 |
+
'-c',
|
| 104 |
+
'import pkg_resources; import myns.pkgA',
|
| 105 |
+
]
|
| 106 |
+
with paths_on_pythonpath([str(target)]):
|
| 107 |
+
subprocess.check_call(pkg_resources_imp, cwd=str(pkg_A))
|
| 108 |
+
|
| 109 |
+
def test_packages_in_the_same_namespace_installed_and_cwd(self, tmpdir):
|
| 110 |
+
"""
|
| 111 |
+
Installing one namespace package and also have another in the same
|
| 112 |
+
namespace in the current working directory, both of them must be
|
| 113 |
+
importable.
|
| 114 |
+
"""
|
| 115 |
+
pkg_A = namespaces.build_namespace_package(tmpdir, 'myns.pkgA')
|
| 116 |
+
pkg_B = namespaces.build_namespace_package(tmpdir, 'myns.pkgB')
|
| 117 |
+
target = tmpdir / 'packages'
|
| 118 |
+
# use pip to install to the target directory
|
| 119 |
+
install_cmd = [
|
| 120 |
+
sys.executable,
|
| 121 |
+
'-m',
|
| 122 |
+
'pip.__main__',
|
| 123 |
+
'install',
|
| 124 |
+
str(pkg_A),
|
| 125 |
+
'-t',
|
| 126 |
+
str(target),
|
| 127 |
+
]
|
| 128 |
+
subprocess.check_call(install_cmd)
|
| 129 |
+
namespaces.make_site_dir(target)
|
| 130 |
+
|
| 131 |
+
# ensure that all packages import and pkg_resources imports
|
| 132 |
+
pkg_resources_imp = [
|
| 133 |
+
sys.executable,
|
| 134 |
+
'-c',
|
| 135 |
+
'import pkg_resources; import myns.pkgA; import myns.pkgB',
|
| 136 |
+
]
|
| 137 |
+
with paths_on_pythonpath([str(target)]):
|
| 138 |
+
subprocess.check_call(pkg_resources_imp, cwd=str(pkg_B))
|
mantis_evalkit/lib/python3.10/site-packages/setuptools/tests/textwrap.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import textwrap
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def DALS(s):
    """Dedent and left-strip *s* (normalize an inline triple-quoted sample)."""
    dedented = textwrap.dedent(s)
    return dedented.lstrip()
|