content
stringlengths
1
103k
path
stringlengths
8
216
filename
stringlengths
2
179
language
stringclasses
15 values
size_bytes
int64
2
189k
quality_score
float64
0.5
0.95
complexity
float64
0
1
documentation_ratio
float64
0
1
repository
stringclasses
5 values
stars
int64
0
1k
created_date
stringdate
2023-07-10 19:21:08
2025-07-09 19:11:45
license
stringclasses
4 values
is_test
bool
2 classes
file_hash
stringlengths
32
32
import subprocess\nfrom pathlib import Path\n\nimport pytest\n\n\n# PyInstaller has been very unproactive about replacing 'imp' with 'importlib'.\n@pytest.mark.filterwarnings('ignore::DeprecationWarning')\n# It also leaks io.BytesIO()s.\n@pytest.mark.filterwarnings('ignore::ResourceWarning')\n@pytest.mark.parametrize("mode", ["--onedir", "--onefile"])\n@pytest.mark.slow\ndef test_pyinstaller(mode, tmp_path):\n """Compile and run pyinstaller-smoke.py using PyInstaller."""\n\n pyinstaller_cli = pytest.importorskip("PyInstaller.__main__").run\n\n source = Path(__file__).with_name("pyinstaller-smoke.py").resolve()\n args = [\n # Place all generated files in ``tmp_path``.\n '--workpath', str(tmp_path / "build"),\n '--distpath', str(tmp_path / "dist"),\n '--specpath', str(tmp_path),\n mode,\n str(source),\n ]\n pyinstaller_cli(args)\n\n if mode == "--onefile":\n exe = tmp_path / "dist" / source.stem\n else:\n exe = tmp_path / "dist" / source.stem / source.stem\n\n p = subprocess.run([str(exe)], check=True, stdout=subprocess.PIPE)\n assert p.stdout.strip() == b"I made it!"\n
.venv\Lib\site-packages\numpy\_pyinstaller\tests\test_pyinstaller.py
test_pyinstaller.py
Python
1,170
0.95
0.057143
0.107143
awesome-app
831
2024-11-16T19:45:57.418857
MIT
true
87575a08d8cb2b388a398519a3d1311b
import pytest\n\nfrom numpy.testing import IS_EDITABLE, IS_WASM\n\nif IS_WASM:\n pytest.skip(\n "WASM/Pyodide does not use or support Fortran",\n allow_module_level=True\n )\n\n\nif IS_EDITABLE:\n pytest.skip(\n "Editable install doesn't support tests with a compile step",\n allow_module_level=True\n )\n
.venv\Lib\site-packages\numpy\_pyinstaller\tests\__init__.py
__init__.py
Python
345
0.85
0.125
0
vue-tools
386
2023-09-22T18:29:31.147478
BSD-3-Clause
true
00111e98b442351da700056a5765bb06
\n\n
.venv\Lib\site-packages\numpy\_pyinstaller\tests\__pycache__\pyinstaller-smoke.cpython-313.pyc
pyinstaller-smoke.cpython-313.pyc
Other
2,595
0.95
0
0
react-lib
25
2025-04-04T16:39:22.615248
MIT
true
f82b21c7a88837a8441fb383f1d4ff18
\n\n
.venv\Lib\site-packages\numpy\_pyinstaller\tests\__pycache__\test_pyinstaller.cpython-313.pyc
test_pyinstaller.cpython-313.pyc
Other
1,963
0.85
0
0
node-utils
503
2024-02-07T12:57:51.000203
MIT
true
b2ab150c20cf4b4d446abb76ad732eed
\n\n
.venv\Lib\site-packages\numpy\_pyinstaller\tests\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
570
0.7
0
0
node-utils
157
2025-02-25T05:15:43.715540
GPL-3.0
true
a2a04b48818995f3595bb556e8d11208
\n\n
.venv\Lib\site-packages\numpy\_pyinstaller\__pycache__\hook-numpy.cpython-313.pyc
hook-numpy.cpython-313.pyc
Other
938
0.95
0.0625
0
vue-tools
822
2024-04-18T07:09:31.763268
Apache-2.0
false
689803ee7d6227d9cf6de0fe55082541
\n\n
.venv\Lib\site-packages\numpy\_pyinstaller\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
193
0.7
0
0
python-kit
733
2024-11-09T08:15:26.726167
Apache-2.0
false
cb4b0755ffbecad53f0a2c35f446b5c8
"""A module for creating docstrings for sphinx ``data`` domains."""\n\nimport re\nimport textwrap\n\nfrom ._array_like import NDArray\n\n_docstrings_list = []\n\n\ndef add_newdoc(name: str, value: str, doc: str) -> None:\n """Append ``_docstrings_list`` with a docstring for `name`.\n\n Parameters\n ----------\n name : str\n The name of the object.\n value : str\n A string-representation of the object.\n doc : str\n The docstring of the object.\n\n """\n _docstrings_list.append((name, value, doc))\n\n\ndef _parse_docstrings() -> str:\n """Convert all docstrings in ``_docstrings_list`` into a single\n sphinx-legible text block.\n\n """\n type_list_ret = []\n for name, value, doc in _docstrings_list:\n s = textwrap.dedent(doc).replace("\n", "\n ")\n\n # Replace sections by rubrics\n lines = s.split("\n")\n new_lines = []\n indent = ""\n for line in lines:\n m = re.match(r'^(\s+)[-=]+\s*$', line)\n if m and new_lines:\n prev = textwrap.dedent(new_lines.pop())\n if prev == "Examples":\n indent = ""\n new_lines.append(f'{m.group(1)}.. rubric:: {prev}')\n else:\n indent = 4 * " "\n new_lines.append(f'{m.group(1)}.. admonition:: {prev}')\n new_lines.append("")\n else:\n new_lines.append(f"{indent}{line}")\n\n s = "\n".join(new_lines)\n s_block = f""".. data:: {name}\n :value: {value}\n {s}"""\n type_list_ret.append(s_block)\n return "\n".join(type_list_ret)\n\n\nadd_newdoc('ArrayLike', 'typing.Union[...]',\n """\n A `~typing.Union` representing objects that can be coerced\n into an `~numpy.ndarray`.\n\n Among others this includes the likes of:\n\n * Scalars.\n * (Nested) sequences.\n * Objects implementing the `~class.__array__` protocol.\n\n .. versionadded:: 1.20\n\n See Also\n --------\n :term:`array_like`:\n Any scalar or sequence that can be interpreted as an ndarray.\n\n Examples\n --------\n .. code-block:: python\n\n >>> import numpy as np\n >>> import numpy.typing as npt\n\n >>> def as_array(a: npt.ArrayLike) -> np.ndarray:\n ... 
return np.array(a)\n\n """)\n\nadd_newdoc('DTypeLike', 'typing.Union[...]',\n """\n A `~typing.Union` representing objects that can be coerced\n into a `~numpy.dtype`.\n\n Among others this includes the likes of:\n\n * :class:`type` objects.\n * Character codes or the names of :class:`type` objects.\n * Objects with the ``.dtype`` attribute.\n\n .. versionadded:: 1.20\n\n See Also\n --------\n :ref:`Specifying and constructing data types <arrays.dtypes.constructing>`\n A comprehensive overview of all objects that can be coerced\n into data types.\n\n Examples\n --------\n .. code-block:: python\n\n >>> import numpy as np\n >>> import numpy.typing as npt\n\n >>> def as_dtype(d: npt.DTypeLike) -> np.dtype:\n ... return np.dtype(d)\n\n """)\n\nadd_newdoc('NDArray', repr(NDArray),\n """\n A `np.ndarray[tuple[Any, ...], np.dtype[ScalarT]] <numpy.ndarray>`\n type alias :term:`generic <generic type>` w.r.t. its\n `dtype.type <numpy.dtype.type>`.\n\n Can be used during runtime for typing arrays with a given dtype\n and unspecified shape.\n\n .. versionadded:: 1.21\n\n Examples\n --------\n .. code-block:: python\n\n >>> import numpy as np\n >>> import numpy.typing as npt\n\n >>> print(npt.NDArray)\n numpy.ndarray[tuple[typing.Any, ...], numpy.dtype[~_ScalarT]]\n\n >>> print(npt.NDArray[np.float64])\n numpy.ndarray[tuple[typing.Any, ...], numpy.dtype[numpy.float64]]\n\n >>> NDArrayInt = npt.NDArray[np.int_]\n >>> a: NDArrayInt = np.arange(10)\n\n >>> def func(a: npt.ArrayLike) -> npt.NDArray[Any]:\n ... return np.array(a)\n\n """)\n\n_docstrings = _parse_docstrings()\n
.venv\Lib\site-packages\numpy\_typing\_add_docstring.py
_add_docstring.py
Python
4,152
0.95
0.104575
0.063063
node-utils
472
2023-08-30T12:50:26.684598
BSD-3-Clause
false
61ce78624deb2a4dd87bd6cdbc464da4
import sys\nfrom collections.abc import Callable, Collection, Sequence\nfrom typing import TYPE_CHECKING, Any, Protocol, TypeAlias, TypeVar, runtime_checkable\n\nimport numpy as np\nfrom numpy import dtype\n\nfrom ._nbit_base import _32Bit, _64Bit\nfrom ._nested_sequence import _NestedSequence\nfrom ._shape import _AnyShape\n\nif TYPE_CHECKING:\n StringDType = np.dtypes.StringDType\nelse:\n # at runtime outside of type checking importing this from numpy.dtypes\n # would lead to a circular import\n from numpy._core.multiarray import StringDType\n\n_T = TypeVar("_T")\n_ScalarT = TypeVar("_ScalarT", bound=np.generic)\n_DTypeT = TypeVar("_DTypeT", bound=dtype[Any])\n_DTypeT_co = TypeVar("_DTypeT_co", covariant=True, bound=dtype[Any])\n\nNDArray: TypeAlias = np.ndarray[_AnyShape, dtype[_ScalarT]]\n\n# The `_SupportsArray` protocol only cares about the default dtype\n# (i.e. `dtype=None` or no `dtype` parameter at all) of the to-be returned\n# array.\n# Concrete implementations of the protocol are responsible for adding\n# any and all remaining overloads\n@runtime_checkable\nclass _SupportsArray(Protocol[_DTypeT_co]):\n def __array__(self) -> np.ndarray[Any, _DTypeT_co]: ...\n\n\n@runtime_checkable\nclass _SupportsArrayFunc(Protocol):\n """A protocol class representing `~class.__array_function__`."""\n def __array_function__(\n self,\n func: Callable[..., Any],\n types: Collection[type[Any]],\n args: tuple[Any, ...],\n kwargs: dict[str, Any],\n ) -> object: ...\n\n\n# TODO: Wait until mypy supports recursive objects in combination with typevars\n_FiniteNestedSequence: TypeAlias = (\n _T\n | Sequence[_T]\n | Sequence[Sequence[_T]]\n | Sequence[Sequence[Sequence[_T]]]\n | Sequence[Sequence[Sequence[Sequence[_T]]]]\n)\n\n# A subset of `npt.ArrayLike` that can be parametrized w.r.t. 
`np.generic`\n_ArrayLike: TypeAlias = (\n _SupportsArray[dtype[_ScalarT]]\n | _NestedSequence[_SupportsArray[dtype[_ScalarT]]]\n)\n\n# A union representing array-like objects; consists of two typevars:\n# One representing types that can be parametrized w.r.t. `np.dtype`\n# and another one for the rest\n_DualArrayLike: TypeAlias = (\n _SupportsArray[_DTypeT]\n | _NestedSequence[_SupportsArray[_DTypeT]]\n | _T\n | _NestedSequence[_T]\n)\n\nif sys.version_info >= (3, 12):\n from collections.abc import Buffer as _Buffer\nelse:\n @runtime_checkable\n class _Buffer(Protocol):\n def __buffer__(self, flags: int, /) -> memoryview: ...\n\nArrayLike: TypeAlias = _Buffer | _DualArrayLike[dtype[Any], complex | bytes | str]\n\n# `ArrayLike<X>_co`: array-like objects that can be coerced into `X`\n# given the casting rules `same_kind`\n_ArrayLikeBool_co: TypeAlias = _DualArrayLike[dtype[np.bool], bool]\n_ArrayLikeUInt_co: TypeAlias = _DualArrayLike[dtype[np.bool | np.unsignedinteger], bool]\n_ArrayLikeInt_co: TypeAlias = _DualArrayLike[dtype[np.bool | np.integer], int]\n_ArrayLikeFloat_co: TypeAlias = _DualArrayLike[dtype[np.bool | np.integer | np.floating], float]\n_ArrayLikeComplex_co: TypeAlias = _DualArrayLike[dtype[np.bool | np.number], complex]\n_ArrayLikeNumber_co: TypeAlias = _ArrayLikeComplex_co\n_ArrayLikeTD64_co: TypeAlias = _DualArrayLike[dtype[np.bool | np.integer | np.timedelta64], int]\n_ArrayLikeDT64_co: TypeAlias = _ArrayLike[np.datetime64]\n_ArrayLikeObject_co: TypeAlias = _ArrayLike[np.object_]\n\n_ArrayLikeVoid_co: TypeAlias = _ArrayLike[np.void]\n_ArrayLikeBytes_co: TypeAlias = _DualArrayLike[dtype[np.bytes_], bytes]\n_ArrayLikeStr_co: TypeAlias = _DualArrayLike[dtype[np.str_], str]\n_ArrayLikeString_co: TypeAlias = _DualArrayLike[StringDType, str]\n_ArrayLikeAnyString_co: TypeAlias = _DualArrayLike[dtype[np.character] | StringDType, bytes | str]\n\n__Float64_co: TypeAlias = np.floating[_64Bit] | np.float32 | np.float16 | np.integer | np.bool\n__Complex128_co: 
TypeAlias = np.number[_64Bit] | np.number[_32Bit] | np.float16 | np.integer | np.bool\n_ArrayLikeFloat64_co: TypeAlias = _DualArrayLike[dtype[__Float64_co], float]\n_ArrayLikeComplex128_co: TypeAlias = _DualArrayLike[dtype[__Complex128_co], complex]\n\n# NOTE: This includes `builtins.bool`, but not `numpy.bool`.\n_ArrayLikeInt: TypeAlias = _DualArrayLike[dtype[np.integer], int]\n
.venv\Lib\site-packages\numpy\_typing\_array_like.py
_array_like.py
Python
4,294
0.95
0.113208
0.170455
node-utils
292
2025-04-23T16:47:49.525873
GPL-3.0
false
42c8d880edfca392e98ca44c3ed6aab4
"""\nA module with various ``typing.Protocol`` subclasses that implement\nthe ``__call__`` magic method.\n\nSee the `Mypy documentation`_ on protocols for more details.\n\n.. _`Mypy documentation`: https://mypy.readthedocs.io/en/stable/protocols.html#callback-protocols\n\n"""\n\nfrom typing import (\n Any,\n NoReturn,\n Protocol,\n TypeAlias,\n TypeVar,\n final,\n overload,\n type_check_only,\n)\n\nimport numpy as np\nfrom numpy import (\n complex128,\n complexfloating,\n float64,\n floating,\n generic,\n int8,\n int_,\n integer,\n number,\n signedinteger,\n unsignedinteger,\n)\n\nfrom . import NBitBase\nfrom ._array_like import NDArray\nfrom ._nbit import _NBitInt\nfrom ._nested_sequence import _NestedSequence\nfrom ._scalars import (\n _BoolLike_co,\n _IntLike_co,\n _NumberLike_co,\n)\n\n_T1 = TypeVar("_T1")\n_T2 = TypeVar("_T2")\n_T1_contra = TypeVar("_T1_contra", contravariant=True)\n_T2_contra = TypeVar("_T2_contra", contravariant=True)\n\n_2Tuple: TypeAlias = tuple[_T1, _T1]\n\n_NBit1 = TypeVar("_NBit1", bound=NBitBase)\n_NBit2 = TypeVar("_NBit2", bound=NBitBase)\n\n_IntType = TypeVar("_IntType", bound=integer)\n_FloatType = TypeVar("_FloatType", bound=floating)\n_NumberType = TypeVar("_NumberType", bound=number)\n_NumberType_co = TypeVar("_NumberType_co", covariant=True, bound=number)\n_GenericType_co = TypeVar("_GenericType_co", covariant=True, bound=generic)\n\n@type_check_only\nclass _BoolOp(Protocol[_GenericType_co]):\n @overload\n def __call__(self, other: _BoolLike_co, /) -> _GenericType_co: ...\n @overload # platform dependent\n def __call__(self, other: int, /) -> int_: ...\n @overload\n def __call__(self, other: float, /) -> float64: ...\n @overload\n def __call__(self, other: complex, /) -> complex128: ...\n @overload\n def __call__(self, other: _NumberType, /) -> _NumberType: ...\n\n@type_check_only\nclass _BoolBitOp(Protocol[_GenericType_co]):\n @overload\n def __call__(self, other: _BoolLike_co, /) -> _GenericType_co: ...\n @overload # platform 
dependent\n def __call__(self, other: int, /) -> int_: ...\n @overload\n def __call__(self, other: _IntType, /) -> _IntType: ...\n\n@type_check_only\nclass _BoolSub(Protocol):\n # Note that `other: bool` is absent here\n @overload\n def __call__(self, other: bool, /) -> NoReturn: ...\n @overload # platform dependent\n def __call__(self, other: int, /) -> int_: ...\n @overload\n def __call__(self, other: float, /) -> float64: ...\n @overload\n def __call__(self, other: complex, /) -> complex128: ...\n @overload\n def __call__(self, other: _NumberType, /) -> _NumberType: ...\n\n@type_check_only\nclass _BoolTrueDiv(Protocol):\n @overload\n def __call__(self, other: float | _IntLike_co, /) -> float64: ...\n @overload\n def __call__(self, other: complex, /) -> complex128: ...\n @overload\n def __call__(self, other: _NumberType, /) -> _NumberType: ...\n\n@type_check_only\nclass _BoolMod(Protocol):\n @overload\n def __call__(self, other: _BoolLike_co, /) -> int8: ...\n @overload # platform dependent\n def __call__(self, other: int, /) -> int_: ...\n @overload\n def __call__(self, other: float, /) -> float64: ...\n @overload\n def __call__(self, other: _IntType, /) -> _IntType: ...\n @overload\n def __call__(self, other: _FloatType, /) -> _FloatType: ...\n\n@type_check_only\nclass _BoolDivMod(Protocol):\n @overload\n def __call__(self, other: _BoolLike_co, /) -> _2Tuple[int8]: ...\n @overload # platform dependent\n def __call__(self, other: int, /) -> _2Tuple[int_]: ...\n @overload\n def __call__(self, other: float, /) -> _2Tuple[np.float64]: ...\n @overload\n def __call__(self, other: _IntType, /) -> _2Tuple[_IntType]: ...\n @overload\n def __call__(self, other: _FloatType, /) -> _2Tuple[_FloatType]: ...\n\n@type_check_only\nclass _IntTrueDiv(Protocol[_NBit1]):\n @overload\n def __call__(self, other: bool, /) -> floating[_NBit1]: ...\n @overload\n def __call__(self, other: int, /) -> floating[_NBit1] | floating[_NBitInt]: ...\n @overload\n def __call__(self, other: float, 
/) -> floating[_NBit1] | float64: ...\n @overload\n def __call__(\n self, other: complex, /\n ) -> complexfloating[_NBit1, _NBit1] | complex128: ...\n @overload\n def __call__(\n self, other: integer[_NBit2], /\n ) -> floating[_NBit1] | floating[_NBit2]: ...\n\n@type_check_only\nclass _UnsignedIntOp(Protocol[_NBit1]):\n # NOTE: `uint64 + signedinteger -> float64`\n @overload\n def __call__(self, other: int, /) -> unsignedinteger[_NBit1]: ...\n @overload\n def __call__(self, other: float, /) -> float64: ...\n @overload\n def __call__(self, other: complex, /) -> complex128: ...\n @overload\n def __call__(self, other: unsignedinteger[_NBit2], /) -> unsignedinteger[_NBit1] | unsignedinteger[_NBit2]: ...\n @overload\n def __call__(self, other: signedinteger, /) -> Any: ...\n\n@type_check_only\nclass _UnsignedIntBitOp(Protocol[_NBit1]):\n @overload\n def __call__(self, other: bool, /) -> unsignedinteger[_NBit1]: ...\n @overload\n def __call__(self, other: int, /) -> signedinteger: ...\n @overload\n def __call__(self, other: signedinteger, /) -> signedinteger: ...\n @overload\n def __call__(\n self, other: unsignedinteger[_NBit2], /\n ) -> unsignedinteger[_NBit1] | unsignedinteger[_NBit2]: ...\n\n@type_check_only\nclass _UnsignedIntMod(Protocol[_NBit1]):\n @overload\n def __call__(self, other: bool, /) -> unsignedinteger[_NBit1]: ...\n @overload\n def __call__(self, other: int | signedinteger, /) -> Any: ...\n @overload\n def __call__(self, other: float, /) -> floating[_NBit1] | float64: ...\n @overload\n def __call__(\n self, other: unsignedinteger[_NBit2], /\n ) -> unsignedinteger[_NBit1] | unsignedinteger[_NBit2]: ...\n\n@type_check_only\nclass _UnsignedIntDivMod(Protocol[_NBit1]):\n @overload\n def __call__(self, other: bool, /) -> _2Tuple[signedinteger[_NBit1]]: ...\n @overload\n def __call__(self, other: int | signedinteger, /) -> _2Tuple[Any]: ...\n @overload\n def __call__(self, other: float, /) -> _2Tuple[floating[_NBit1]] | _2Tuple[float64]: ...\n @overload\n 
def __call__(\n self, other: unsignedinteger[_NBit2], /\n ) -> _2Tuple[unsignedinteger[_NBit1]] | _2Tuple[unsignedinteger[_NBit2]]: ...\n\n@type_check_only\nclass _SignedIntOp(Protocol[_NBit1]):\n @overload\n def __call__(self, other: int, /) -> signedinteger[_NBit1]: ...\n @overload\n def __call__(self, other: float, /) -> float64: ...\n @overload\n def __call__(self, other: complex, /) -> complex128: ...\n @overload\n def __call__(self, other: signedinteger[_NBit2], /) -> signedinteger[_NBit1] | signedinteger[_NBit2]: ...\n\n@type_check_only\nclass _SignedIntBitOp(Protocol[_NBit1]):\n @overload\n def __call__(self, other: bool, /) -> signedinteger[_NBit1]: ...\n @overload\n def __call__(self, other: int, /) -> signedinteger[_NBit1] | int_: ...\n @overload\n def __call__(\n self, other: signedinteger[_NBit2], /\n ) -> signedinteger[_NBit1] | signedinteger[_NBit2]: ...\n\n@type_check_only\nclass _SignedIntMod(Protocol[_NBit1]):\n @overload\n def __call__(self, other: bool, /) -> signedinteger[_NBit1]: ...\n @overload\n def __call__(self, other: int, /) -> signedinteger[_NBit1] | int_: ...\n @overload\n def __call__(self, other: float, /) -> floating[_NBit1] | float64: ...\n @overload\n def __call__(\n self, other: signedinteger[_NBit2], /\n ) -> signedinteger[_NBit1] | signedinteger[_NBit2]: ...\n\n@type_check_only\nclass _SignedIntDivMod(Protocol[_NBit1]):\n @overload\n def __call__(self, other: bool, /) -> _2Tuple[signedinteger[_NBit1]]: ...\n @overload\n def __call__(self, other: int, /) -> _2Tuple[signedinteger[_NBit1]] | _2Tuple[int_]: ...\n @overload\n def __call__(self, other: float, /) -> _2Tuple[floating[_NBit1]] | _2Tuple[float64]: ...\n @overload\n def __call__(\n self, other: signedinteger[_NBit2], /\n ) -> _2Tuple[signedinteger[_NBit1]] | _2Tuple[signedinteger[_NBit2]]: ...\n\n@type_check_only\nclass _FloatOp(Protocol[_NBit1]):\n @overload\n def __call__(self, other: int, /) -> floating[_NBit1]: ...\n @overload\n def __call__(self, other: float, /) -> 
floating[_NBit1] | float64: ...\n @overload\n def __call__(\n self, other: complex, /\n ) -> complexfloating[_NBit1, _NBit1] | complex128: ...\n @overload\n def __call__(\n self, other: integer[_NBit2] | floating[_NBit2], /\n ) -> floating[_NBit1] | floating[_NBit2]: ...\n\n@type_check_only\nclass _FloatMod(Protocol[_NBit1]):\n @overload\n def __call__(self, other: bool, /) -> floating[_NBit1]: ...\n @overload\n def __call__(self, other: int, /) -> floating[_NBit1] | floating[_NBitInt]: ...\n @overload\n def __call__(self, other: float, /) -> floating[_NBit1] | float64: ...\n @overload\n def __call__(\n self, other: integer[_NBit2] | floating[_NBit2], /\n ) -> floating[_NBit1] | floating[_NBit2]: ...\n\nclass _FloatDivMod(Protocol[_NBit1]):\n @overload\n def __call__(self, other: bool, /) -> _2Tuple[floating[_NBit1]]: ...\n @overload\n def __call__(\n self, other: int, /\n ) -> _2Tuple[floating[_NBit1]] | _2Tuple[floating[_NBitInt]]: ...\n @overload\n def __call__(\n self, other: float, /\n ) -> _2Tuple[floating[_NBit1]] | _2Tuple[float64]: ...\n @overload\n def __call__(\n self, other: integer[_NBit2] | floating[_NBit2], /\n ) -> _2Tuple[floating[_NBit1]] | _2Tuple[floating[_NBit2]]: ...\n\n@type_check_only\nclass _NumberOp(Protocol):\n def __call__(self, other: _NumberLike_co, /) -> Any: ...\n\n@final\n@type_check_only\nclass _SupportsLT(Protocol):\n def __lt__(self, other: Any, /) -> Any: ...\n\n@final\n@type_check_only\nclass _SupportsLE(Protocol):\n def __le__(self, other: Any, /) -> Any: ...\n\n@final\n@type_check_only\nclass _SupportsGT(Protocol):\n def __gt__(self, other: Any, /) -> Any: ...\n\n@final\n@type_check_only\nclass _SupportsGE(Protocol):\n def __ge__(self, other: Any, /) -> Any: ...\n\n@final\n@type_check_only\nclass _ComparisonOpLT(Protocol[_T1_contra, _T2_contra]):\n @overload\n def __call__(self, other: _T1_contra, /) -> np.bool: ...\n @overload\n def __call__(self, other: _T2_contra, /) -> NDArray[np.bool]: ...\n @overload\n def 
__call__(self, other: _NestedSequence[_SupportsGT], /) -> NDArray[np.bool]: ...\n @overload\n def __call__(self, other: _SupportsGT, /) -> np.bool: ...\n\n@final\n@type_check_only\nclass _ComparisonOpLE(Protocol[_T1_contra, _T2_contra]):\n @overload\n def __call__(self, other: _T1_contra, /) -> np.bool: ...\n @overload\n def __call__(self, other: _T2_contra, /) -> NDArray[np.bool]: ...\n @overload\n def __call__(self, other: _NestedSequence[_SupportsGE], /) -> NDArray[np.bool]: ...\n @overload\n def __call__(self, other: _SupportsGE, /) -> np.bool: ...\n\n@final\n@type_check_only\nclass _ComparisonOpGT(Protocol[_T1_contra, _T2_contra]):\n @overload\n def __call__(self, other: _T1_contra, /) -> np.bool: ...\n @overload\n def __call__(self, other: _T2_contra, /) -> NDArray[np.bool]: ...\n @overload\n def __call__(self, other: _NestedSequence[_SupportsLT], /) -> NDArray[np.bool]: ...\n @overload\n def __call__(self, other: _SupportsLT, /) -> np.bool: ...\n\n@final\n@type_check_only\nclass _ComparisonOpGE(Protocol[_T1_contra, _T2_contra]):\n @overload\n def __call__(self, other: _T1_contra, /) -> np.bool: ...\n @overload\n def __call__(self, other: _T2_contra, /) -> NDArray[np.bool]: ...\n @overload\n def __call__(self, other: _NestedSequence[_SupportsGT], /) -> NDArray[np.bool]: ...\n @overload\n def __call__(self, other: _SupportsGT, /) -> np.bool: ...\n
.venv\Lib\site-packages\numpy\_typing\_callable.pyi
_callable.pyi
Other
12,133
0.95
0.338798
0.006079
vue-tools
861
2024-04-14T09:31:04.714498
Apache-2.0
false
8518bef3b2cbdfacebbc3404797fa5ca
from typing import Literal\n\n_BoolCodes = Literal[\n "bool", "bool_",\n "?", "|?", "=?", "<?", ">?",\n "b1", "|b1", "=b1", "<b1", ">b1",\n] # fmt: skip\n\n_UInt8Codes = Literal["uint8", "u1", "|u1", "=u1", "<u1", ">u1"]\n_UInt16Codes = Literal["uint16", "u2", "|u2", "=u2", "<u2", ">u2"]\n_UInt32Codes = Literal["uint32", "u4", "|u4", "=u4", "<u4", ">u4"]\n_UInt64Codes = Literal["uint64", "u8", "|u8", "=u8", "<u8", ">u8"]\n\n_Int8Codes = Literal["int8", "i1", "|i1", "=i1", "<i1", ">i1"]\n_Int16Codes = Literal["int16", "i2", "|i2", "=i2", "<i2", ">i2"]\n_Int32Codes = Literal["int32", "i4", "|i4", "=i4", "<i4", ">i4"]\n_Int64Codes = Literal["int64", "i8", "|i8", "=i8", "<i8", ">i8"]\n\n_Float16Codes = Literal["float16", "f2", "|f2", "=f2", "<f2", ">f2"]\n_Float32Codes = Literal["float32", "f4", "|f4", "=f4", "<f4", ">f4"]\n_Float64Codes = Literal["float64", "f8", "|f8", "=f8", "<f8", ">f8"]\n\n_Complex64Codes = Literal["complex64", "c8", "|c8", "=c8", "<c8", ">c8"]\n_Complex128Codes = Literal["complex128", "c16", "|c16", "=c16", "<c16", ">c16"]\n\n_ByteCodes = Literal["byte", "b", "|b", "=b", "<b", ">b"]\n_ShortCodes = Literal["short", "h", "|h", "=h", "<h", ">h"]\n_IntCCodes = Literal["intc", "i", "|i", "=i", "<i", ">i"]\n_IntPCodes = Literal["intp", "int", "int_", "n", "|n", "=n", "<n", ">n"]\n_LongCodes = Literal["long", "l", "|l", "=l", "<l", ">l"]\n_IntCodes = _IntPCodes\n_LongLongCodes = Literal["longlong", "q", "|q", "=q", "<q", ">q"]\n\n_UByteCodes = Literal["ubyte", "B", "|B", "=B", "<B", ">B"]\n_UShortCodes = Literal["ushort", "H", "|H", "=H", "<H", ">H"]\n_UIntCCodes = Literal["uintc", "I", "|I", "=I", "<I", ">I"]\n_UIntPCodes = Literal["uintp", "uint", "N", "|N", "=N", "<N", ">N"]\n_ULongCodes = Literal["ulong", "L", "|L", "=L", "<L", ">L"]\n_UIntCodes = _UIntPCodes\n_ULongLongCodes = Literal["ulonglong", "Q", "|Q", "=Q", "<Q", ">Q"]\n\n_HalfCodes = Literal["half", "e", "|e", "=e", "<e", ">e"]\n_SingleCodes = Literal["single", "f", "|f", "=f", "<f", 
">f"]\n_DoubleCodes = Literal["double", "float", "d", "|d", "=d", "<d", ">d"]\n_LongDoubleCodes = Literal["longdouble", "g", "|g", "=g", "<g", ">g"]\n\n_CSingleCodes = Literal["csingle", "F", "|F", "=F", "<F", ">F"]\n_CDoubleCodes = Literal["cdouble", "complex", "D", "|D", "=D", "<D", ">D"]\n_CLongDoubleCodes = Literal["clongdouble", "G", "|G", "=G", "<G", ">G"]\n\n_StrCodes = Literal["str", "str_", "unicode", "U", "|U", "=U", "<U", ">U"]\n_BytesCodes = Literal["bytes", "bytes_", "S", "|S", "=S", "<S", ">S"]\n_VoidCodes = Literal["void", "V", "|V", "=V", "<V", ">V"]\n_ObjectCodes = Literal["object", "object_", "O", "|O", "=O", "<O", ">O"]\n\n_DT64Codes = Literal[\n "datetime64", "|datetime64", "=datetime64",\n "<datetime64", ">datetime64",\n "datetime64[Y]", "|datetime64[Y]", "=datetime64[Y]",\n "<datetime64[Y]", ">datetime64[Y]",\n "datetime64[M]", "|datetime64[M]", "=datetime64[M]",\n "<datetime64[M]", ">datetime64[M]",\n "datetime64[W]", "|datetime64[W]", "=datetime64[W]",\n "<datetime64[W]", ">datetime64[W]",\n "datetime64[D]", "|datetime64[D]", "=datetime64[D]",\n "<datetime64[D]", ">datetime64[D]",\n "datetime64[h]", "|datetime64[h]", "=datetime64[h]",\n "<datetime64[h]", ">datetime64[h]",\n "datetime64[m]", "|datetime64[m]", "=datetime64[m]",\n "<datetime64[m]", ">datetime64[m]",\n "datetime64[s]", "|datetime64[s]", "=datetime64[s]",\n "<datetime64[s]", ">datetime64[s]",\n "datetime64[ms]", "|datetime64[ms]", "=datetime64[ms]",\n "<datetime64[ms]", ">datetime64[ms]",\n "datetime64[us]", "|datetime64[us]", "=datetime64[us]",\n "<datetime64[us]", ">datetime64[us]",\n "datetime64[ns]", "|datetime64[ns]", "=datetime64[ns]",\n "<datetime64[ns]", ">datetime64[ns]",\n "datetime64[ps]", "|datetime64[ps]", "=datetime64[ps]",\n "<datetime64[ps]", ">datetime64[ps]",\n "datetime64[fs]", "|datetime64[fs]", "=datetime64[fs]",\n "<datetime64[fs]", ">datetime64[fs]",\n "datetime64[as]", "|datetime64[as]", "=datetime64[as]",\n "<datetime64[as]", ">datetime64[as]",\n "M", 
"|M", "=M", "<M", ">M",\n "M8", "|M8", "=M8", "<M8", ">M8",\n "M8[Y]", "|M8[Y]", "=M8[Y]", "<M8[Y]", ">M8[Y]",\n "M8[M]", "|M8[M]", "=M8[M]", "<M8[M]", ">M8[M]",\n "M8[W]", "|M8[W]", "=M8[W]", "<M8[W]", ">M8[W]",\n "M8[D]", "|M8[D]", "=M8[D]", "<M8[D]", ">M8[D]",\n "M8[h]", "|M8[h]", "=M8[h]", "<M8[h]", ">M8[h]",\n "M8[m]", "|M8[m]", "=M8[m]", "<M8[m]", ">M8[m]",\n "M8[s]", "|M8[s]", "=M8[s]", "<M8[s]", ">M8[s]",\n "M8[ms]", "|M8[ms]", "=M8[ms]", "<M8[ms]", ">M8[ms]",\n "M8[us]", "|M8[us]", "=M8[us]", "<M8[us]", ">M8[us]",\n "M8[ns]", "|M8[ns]", "=M8[ns]", "<M8[ns]", ">M8[ns]",\n "M8[ps]", "|M8[ps]", "=M8[ps]", "<M8[ps]", ">M8[ps]",\n "M8[fs]", "|M8[fs]", "=M8[fs]", "<M8[fs]", ">M8[fs]",\n "M8[as]", "|M8[as]", "=M8[as]", "<M8[as]", ">M8[as]",\n]\n_TD64Codes = Literal[\n "timedelta64", "|timedelta64", "=timedelta64",\n "<timedelta64", ">timedelta64",\n "timedelta64[Y]", "|timedelta64[Y]", "=timedelta64[Y]",\n "<timedelta64[Y]", ">timedelta64[Y]",\n "timedelta64[M]", "|timedelta64[M]", "=timedelta64[M]",\n "<timedelta64[M]", ">timedelta64[M]",\n "timedelta64[W]", "|timedelta64[W]", "=timedelta64[W]",\n "<timedelta64[W]", ">timedelta64[W]",\n "timedelta64[D]", "|timedelta64[D]", "=timedelta64[D]",\n "<timedelta64[D]", ">timedelta64[D]",\n "timedelta64[h]", "|timedelta64[h]", "=timedelta64[h]",\n "<timedelta64[h]", ">timedelta64[h]",\n "timedelta64[m]", "|timedelta64[m]", "=timedelta64[m]",\n "<timedelta64[m]", ">timedelta64[m]",\n "timedelta64[s]", "|timedelta64[s]", "=timedelta64[s]",\n "<timedelta64[s]", ">timedelta64[s]",\n "timedelta64[ms]", "|timedelta64[ms]", "=timedelta64[ms]",\n "<timedelta64[ms]", ">timedelta64[ms]",\n "timedelta64[us]", "|timedelta64[us]", "=timedelta64[us]",\n "<timedelta64[us]", ">timedelta64[us]",\n "timedelta64[ns]", "|timedelta64[ns]", "=timedelta64[ns]",\n "<timedelta64[ns]", ">timedelta64[ns]",\n "timedelta64[ps]", "|timedelta64[ps]", "=timedelta64[ps]",\n "<timedelta64[ps]", ">timedelta64[ps]",\n "timedelta64[fs]", 
"|timedelta64[fs]", "=timedelta64[fs]",\n "<timedelta64[fs]", ">timedelta64[fs]",\n "timedelta64[as]", "|timedelta64[as]", "=timedelta64[as]",\n "<timedelta64[as]", ">timedelta64[as]",\n "m", "|m", "=m", "<m", ">m",\n "m8", "|m8", "=m8", "<m8", ">m8",\n "m8[Y]", "|m8[Y]", "=m8[Y]", "<m8[Y]", ">m8[Y]",\n "m8[M]", "|m8[M]", "=m8[M]", "<m8[M]", ">m8[M]",\n "m8[W]", "|m8[W]", "=m8[W]", "<m8[W]", ">m8[W]",\n "m8[D]", "|m8[D]", "=m8[D]", "<m8[D]", ">m8[D]",\n "m8[h]", "|m8[h]", "=m8[h]", "<m8[h]", ">m8[h]",\n "m8[m]", "|m8[m]", "=m8[m]", "<m8[m]", ">m8[m]",\n "m8[s]", "|m8[s]", "=m8[s]", "<m8[s]", ">m8[s]",\n "m8[ms]", "|m8[ms]", "=m8[ms]", "<m8[ms]", ">m8[ms]",\n "m8[us]", "|m8[us]", "=m8[us]", "<m8[us]", ">m8[us]",\n "m8[ns]", "|m8[ns]", "=m8[ns]", "<m8[ns]", ">m8[ns]",\n "m8[ps]", "|m8[ps]", "=m8[ps]", "<m8[ps]", ">m8[ps]",\n "m8[fs]", "|m8[fs]", "=m8[fs]", "<m8[fs]", ">m8[fs]",\n "m8[as]", "|m8[as]", "=m8[as]", "<m8[as]", ">m8[as]",\n]\n\n# NOTE: `StringDType' has no scalar type, and therefore has no name that can\n# be passed to the `dtype` constructor\n_StringCodes = Literal["T", "|T", "=T", "<T", ">T"]\n\n# NOTE: Nested literals get flattened and de-duplicated at runtime, which isn't\n# the case for a `Union` of `Literal`s.\n# So even though they're equivalent when type-checking, they differ at runtime.\n# Another advantage of nesting, is that they always have a "flat"\n# `Literal.__args__`, which is a tuple of *literally* all its literal values.\n\n_UnsignedIntegerCodes = Literal[\n _UInt8Codes,\n _UInt16Codes,\n _UInt32Codes,\n _UInt64Codes,\n _UIntCodes,\n _UByteCodes,\n _UShortCodes,\n _UIntCCodes,\n _ULongCodes,\n _ULongLongCodes,\n]\n_SignedIntegerCodes = Literal[\n _Int8Codes,\n _Int16Codes,\n _Int32Codes,\n _Int64Codes,\n _IntCodes,\n _ByteCodes,\n _ShortCodes,\n _IntCCodes,\n _LongCodes,\n _LongLongCodes,\n]\n_FloatingCodes = Literal[\n _Float16Codes,\n _Float32Codes,\n _Float64Codes,\n _HalfCodes,\n _SingleCodes,\n _DoubleCodes,\n 
_LongDoubleCodes\n]\n_ComplexFloatingCodes = Literal[\n _Complex64Codes,\n _Complex128Codes,\n _CSingleCodes,\n _CDoubleCodes,\n _CLongDoubleCodes,\n]\n_IntegerCodes = Literal[_UnsignedIntegerCodes, _SignedIntegerCodes]\n_InexactCodes = Literal[_FloatingCodes, _ComplexFloatingCodes]\n_NumberCodes = Literal[_IntegerCodes, _InexactCodes]\n\n_CharacterCodes = Literal[_StrCodes, _BytesCodes]\n_FlexibleCodes = Literal[_VoidCodes, _CharacterCodes]\n\n_GenericCodes = Literal[\n _BoolCodes,\n _NumberCodes,\n _FlexibleCodes,\n _DT64Codes,\n _TD64Codes,\n _ObjectCodes,\n # TODO: add `_StringCodes` once it has a scalar type\n # _StringCodes,\n]\n
.venv\Lib\site-packages\numpy\_typing\_char_codes.py
_char_codes.py
Python
8,977
0.95
0.004695
0.045685
vue-tools
146
2025-04-16T01:59:56.272529
Apache-2.0
false
eb8529ee9fa55d2b1b1d5adc2e7acf28
from collections.abc import Sequence # noqa: F811\nfrom typing import (\n Any,\n Protocol,\n TypeAlias,\n TypedDict,\n TypeVar,\n runtime_checkable,\n)\n\nimport numpy as np\n\nfrom ._char_codes import (\n _BoolCodes,\n _BytesCodes,\n _ComplexFloatingCodes,\n _DT64Codes,\n _FloatingCodes,\n _NumberCodes,\n _ObjectCodes,\n _SignedIntegerCodes,\n _StrCodes,\n _TD64Codes,\n _UnsignedIntegerCodes,\n _VoidCodes,\n)\n\n_ScalarT = TypeVar("_ScalarT", bound=np.generic)\n_DTypeT_co = TypeVar("_DTypeT_co", bound=np.dtype, covariant=True)\n\n_DTypeLikeNested: TypeAlias = Any # TODO: wait for support for recursive types\n\n\n# Mandatory keys\nclass _DTypeDictBase(TypedDict):\n names: Sequence[str]\n formats: Sequence[_DTypeLikeNested]\n\n\n# Mandatory + optional keys\nclass _DTypeDict(_DTypeDictBase, total=False):\n # Only `str` elements are usable as indexing aliases,\n # but `titles` can in principle accept any object\n offsets: Sequence[int]\n titles: Sequence[Any]\n itemsize: int\n aligned: bool\n\n\n# A protocol for anything with the dtype attribute\n@runtime_checkable\nclass _SupportsDType(Protocol[_DTypeT_co]):\n @property\n def dtype(self) -> _DTypeT_co: ...\n\n\n# A subset of `npt.DTypeLike` that can be parametrized w.r.t. 
`np.generic`\n_DTypeLike: TypeAlias = type[_ScalarT] | np.dtype[_ScalarT] | _SupportsDType[np.dtype[_ScalarT]]\n\n\n# Would create a dtype[np.void]\n_VoidDTypeLike: TypeAlias = (\n # If a tuple, then it can be either:\n # - (flexible_dtype, itemsize)\n # - (fixed_dtype, shape)\n # - (base_dtype, new_dtype)\n # But because `_DTypeLikeNested = Any`, the first two cases are redundant\n\n # tuple[_DTypeLikeNested, int] | tuple[_DTypeLikeNested, _ShapeLike] |\n tuple[_DTypeLikeNested, _DTypeLikeNested]\n\n # [(field_name, field_dtype, field_shape), ...]\n # The type here is quite broad because NumPy accepts quite a wide\n # range of inputs inside the list; see the tests for some examples.\n | list[Any]\n\n # {'names': ..., 'formats': ..., 'offsets': ..., 'titles': ..., 'itemsize': ...}\n | _DTypeDict\n)\n\n# Aliases for commonly used dtype-like objects.\n# Note that the precision of `np.number` subclasses is ignored herein.\n_DTypeLikeBool: TypeAlias = type[bool] | _DTypeLike[np.bool] | _BoolCodes\n_DTypeLikeInt: TypeAlias = (\n type[int] | _DTypeLike[np.signedinteger] | _SignedIntegerCodes\n)\n_DTypeLikeUInt: TypeAlias = _DTypeLike[np.unsignedinteger] | _UnsignedIntegerCodes\n_DTypeLikeFloat: TypeAlias = type[float] | _DTypeLike[np.floating] | _FloatingCodes\n_DTypeLikeComplex: TypeAlias = (\n type[complex] | _DTypeLike[np.complexfloating] | _ComplexFloatingCodes\n)\n_DTypeLikeComplex_co: TypeAlias = (\n type[complex] | _DTypeLike[np.bool | np.number] | _BoolCodes | _NumberCodes\n)\n_DTypeLikeDT64: TypeAlias = _DTypeLike[np.timedelta64] | _TD64Codes\n_DTypeLikeTD64: TypeAlias = _DTypeLike[np.datetime64] | _DT64Codes\n_DTypeLikeBytes: TypeAlias = type[bytes] | _DTypeLike[np.bytes_] | _BytesCodes\n_DTypeLikeStr: TypeAlias = type[str] | _DTypeLike[np.str_] | _StrCodes\n_DTypeLikeVoid: TypeAlias = (\n type[memoryview] | _DTypeLike[np.void] | _VoidDTypeLike | _VoidCodes\n)\n_DTypeLikeObject: TypeAlias = type[object] | _DTypeLike[np.object_] | _ObjectCodes\n\n\n# Anything 
that can be coerced into numpy.dtype.\n# Reference: https://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html\nDTypeLike: TypeAlias = _DTypeLike[Any] | _VoidDTypeLike | str | None\n\n# NOTE: while it is possible to provide the dtype as a dict of\n# dtype-like objects (e.g. `{'field1': ..., 'field2': ..., ...}`),\n# this syntax is officially discouraged and\n# therefore not included in the type-union defining `DTypeLike`.\n#\n# See https://github.com/numpy/numpy/issues/16891 for more details.\n
.venv\Lib\site-packages\numpy\_typing\_dtype_like.py
_dtype_like.py
Python
3,876
0.95
0.096491
0.290323
awesome-app
248
2024-02-12T21:24:26.023367
Apache-2.0
false
e98d65acd4d0209f82f610cebcacd9ca
"""A module with platform-specific extended precision\n`numpy.number` subclasses.\n\nThe subclasses are defined here (instead of ``__init__.pyi``) such\nthat they can be imported conditionally via the numpy's mypy plugin.\n"""\n\nimport numpy as np\n\nfrom . import _96Bit, _128Bit\n\nfloat96 = np.floating[_96Bit]\nfloat128 = np.floating[_128Bit]\ncomplex192 = np.complexfloating[_96Bit, _96Bit]\ncomplex256 = np.complexfloating[_128Bit, _128Bit]\n
.venv\Lib\site-packages\numpy\_typing\_extended_precision.py
_extended_precision.py
Python
449
0.85
0
0
python-kit
996
2024-02-08T10:34:13.060009
MIT
false
3dc6045562bb8c3aae085b046985e40a
"""A module with the precisions of platform-specific `~numpy.number`s."""\n\nfrom typing import TypeAlias\n\nfrom ._nbit_base import _8Bit, _16Bit, _32Bit, _64Bit, _96Bit, _128Bit\n\n# To-be replaced with a `npt.NBitBase` subclass by numpy's mypy plugin\n_NBitByte: TypeAlias = _8Bit\n_NBitShort: TypeAlias = _16Bit\n_NBitIntC: TypeAlias = _32Bit\n_NBitIntP: TypeAlias = _32Bit | _64Bit\n_NBitInt: TypeAlias = _NBitIntP\n_NBitLong: TypeAlias = _32Bit | _64Bit\n_NBitLongLong: TypeAlias = _64Bit\n\n_NBitHalf: TypeAlias = _16Bit\n_NBitSingle: TypeAlias = _32Bit\n_NBitDouble: TypeAlias = _64Bit\n_NBitLongDouble: TypeAlias = _64Bit | _96Bit | _128Bit\n
.venv\Lib\site-packages\numpy\_typing\_nbit.py
_nbit.py
Python
651
0.95
0
0.066667
python-kit
339
2024-04-23T01:28:29.847327
BSD-3-Clause
false
a8df6ded6f55748426a7a6f9b5b7069a
"""A module with the precisions of generic `~numpy.number` types."""\nfrom typing import final\n\nfrom numpy._utils import set_module\n\n\n@final # Disallow the creation of arbitrary `NBitBase` subclasses\n@set_module("numpy.typing")\nclass NBitBase:\n """\n A type representing `numpy.number` precision during static type checking.\n\n Used exclusively for the purpose of static type checking, `NBitBase`\n represents the base of a hierarchical set of subclasses.\n Each subsequent subclass is herein used for representing a lower level\n of precision, *e.g.* ``64Bit > 32Bit > 16Bit``.\n\n .. versionadded:: 1.20\n\n .. deprecated:: 2.3\n Use ``@typing.overload`` or a ``TypeVar`` with a scalar-type as upper\n bound, instead.\n\n Examples\n --------\n Below is a typical usage example: `NBitBase` is herein used for annotating\n a function that takes a float and integer of arbitrary precision\n as arguments and returns a new float of whichever precision is largest\n (*e.g.* ``np.float16 + np.int64 -> np.float64``).\n\n .. code-block:: python\n\n >>> from typing import TypeVar, TYPE_CHECKING\n >>> import numpy as np\n >>> import numpy.typing as npt\n\n >>> S = TypeVar("S", bound=npt.NBitBase)\n >>> T = TypeVar("T", bound=npt.NBitBase)\n\n >>> def add(a: np.floating[S], b: np.integer[T]) -> np.floating[S | T]:\n ... return a + b\n\n >>> a = np.float16()\n >>> b = np.int64()\n >>> out = add(a, b)\n\n >>> if TYPE_CHECKING:\n ... reveal_locals()\n ... # note: Revealed local types are:\n ... # note: a: numpy.floating[numpy.typing._16Bit*]\n ... # note: b: numpy.signedinteger[numpy.typing._64Bit*]\n ... 
# note: out: numpy.floating[numpy.typing._64Bit*]\n\n """\n # Deprecated in NumPy 2.3, 2025-05-01\n\n def __init_subclass__(cls) -> None:\n allowed_names = {\n "NBitBase", "_128Bit", "_96Bit", "_64Bit", "_32Bit", "_16Bit", "_8Bit"\n }\n if cls.__name__ not in allowed_names:\n raise TypeError('cannot inherit from final class "NBitBase"')\n super().__init_subclass__()\n\n@final\n@set_module("numpy._typing")\n# Silence errors about subclassing a `@final`-decorated class\nclass _128Bit(NBitBase): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]\n pass\n\n@final\n@set_module("numpy._typing")\nclass _96Bit(_128Bit): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]\n pass\n\n@final\n@set_module("numpy._typing")\nclass _64Bit(_96Bit): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]\n pass\n\n@final\n@set_module("numpy._typing")\nclass _32Bit(_64Bit): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]\n pass\n\n@final\n@set_module("numpy._typing")\nclass _16Bit(_32Bit): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]\n pass\n\n@final\n@set_module("numpy._typing")\nclass _8Bit(_16Bit): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]\n pass\n
.venv\Lib\site-packages\numpy\_typing\_nbit_base.py
_nbit_base.py
Python
3,152
0.95
0.180851
0.027397
vue-tools
273
2023-10-20T02:25:24.139753
GPL-3.0
false
2cacbb8196a93c3c72e0d935c6b2ca72
# pyright: reportDeprecated=false\n# pyright: reportGeneralTypeIssues=false\n# mypy: disable-error-code=misc\n\nfrom typing import final\n\nfrom typing_extensions import deprecated\n\n# Deprecated in NumPy 2.3, 2025-05-01\n@deprecated(\n "`NBitBase` is deprecated and will be removed from numpy.typing in the "\n "future. Use `@typing.overload` or a `TypeVar` with a scalar-type as upper "\n "bound, instead. (deprecated in NumPy 2.3)",\n)\n@final\nclass NBitBase: ...\n\n@final\nclass _256Bit(NBitBase): ...\n\n@final\nclass _128Bit(_256Bit): ...\n\n@final\nclass _96Bit(_128Bit): ...\n\n@final\nclass _80Bit(_96Bit): ...\n\n@final\nclass _64Bit(_80Bit): ...\n\n@final\nclass _32Bit(_64Bit): ...\n\n@final\nclass _16Bit(_32Bit): ...\n\n@final\nclass _8Bit(_16Bit): ...\n
.venv\Lib\site-packages\numpy\_typing\_nbit_base.pyi
_nbit_base.pyi
Other
780
0.95
0.225
0.137931
python-kit
20
2024-05-22T04:34:05.456497
Apache-2.0
false
fdd1b48a2d0d5eed69682ab7fcb8daf6
"""A module containing the `_NestedSequence` protocol."""\n\nfrom typing import TYPE_CHECKING, Any, Protocol, TypeVar, runtime_checkable\n\nif TYPE_CHECKING:\n from collections.abc import Iterator\n\n__all__ = ["_NestedSequence"]\n\n_T_co = TypeVar("_T_co", covariant=True)\n\n\n@runtime_checkable\nclass _NestedSequence(Protocol[_T_co]):\n """A protocol for representing nested sequences.\n\n Warning\n -------\n `_NestedSequence` currently does not work in combination with typevars,\n *e.g.* ``def func(a: _NestedSequnce[T]) -> T: ...``.\n\n See Also\n --------\n collections.abc.Sequence\n ABCs for read-only and mutable :term:`sequences`.\n\n Examples\n --------\n .. code-block:: python\n\n >>> from typing import TYPE_CHECKING\n >>> import numpy as np\n >>> from numpy._typing import _NestedSequence\n\n >>> def get_dtype(seq: _NestedSequence[float]) -> np.dtype[np.float64]:\n ... return np.asarray(seq).dtype\n\n >>> a = get_dtype([1.0])\n >>> b = get_dtype([[1.0]])\n >>> c = get_dtype([[[1.0]]])\n >>> d = get_dtype([[[[1.0]]]])\n\n >>> if TYPE_CHECKING:\n ... reveal_locals()\n ... # note: Revealed local types are:\n ... # note: a: numpy.dtype[numpy.floating[numpy._typing._64Bit]]\n ... # note: b: numpy.dtype[numpy.floating[numpy._typing._64Bit]]\n ... # note: c: numpy.dtype[numpy.floating[numpy._typing._64Bit]]\n ... 
# note: d: numpy.dtype[numpy.floating[numpy._typing._64Bit]]\n\n """\n\n def __len__(self, /) -> int:\n """Implement ``len(self)``."""\n raise NotImplementedError\n\n def __getitem__(self, index: int, /) -> "_T_co | _NestedSequence[_T_co]":\n """Implement ``self[x]``."""\n raise NotImplementedError\n\n def __contains__(self, x: object, /) -> bool:\n """Implement ``x in self``."""\n raise NotImplementedError\n\n def __iter__(self, /) -> "Iterator[_T_co | _NestedSequence[_T_co]]":\n """Implement ``iter(self)``."""\n raise NotImplementedError\n\n def __reversed__(self, /) -> "Iterator[_T_co | _NestedSequence[_T_co]]":\n """Implement ``reversed(self)``."""\n raise NotImplementedError\n\n def count(self, value: Any, /) -> int:\n """Return the number of occurrences of `value`."""\n raise NotImplementedError\n\n def index(self, value: Any, /) -> int:\n """Return the first index of `value`."""\n raise NotImplementedError\n
.venv\Lib\site-packages\numpy\_typing\_nested_sequence.py
_nested_sequence.py
Python
2,584
0.95
0.177215
0.017241
react-lib
180
2023-11-16T03:55:15.615301
GPL-3.0
false
dca8369b94ca548f4c179bd33e33fc27
from typing import Any, TypeAlias\n\nimport numpy as np\n\n# NOTE: `_StrLike_co` and `_BytesLike_co` are pointless, as `np.str_` and\n# `np.bytes_` are already subclasses of their builtin counterpart\n_CharLike_co: TypeAlias = str | bytes\n\n# The `<X>Like_co` type-aliases below represent all scalars that can be\n# coerced into `<X>` (with the casting rule `same_kind`)\n_BoolLike_co: TypeAlias = bool | np.bool\n_UIntLike_co: TypeAlias = bool | np.unsignedinteger | np.bool\n_IntLike_co: TypeAlias = int | np.integer | np.bool\n_FloatLike_co: TypeAlias = float | np.floating | np.integer | np.bool\n_ComplexLike_co: TypeAlias = complex | np.number | np.bool\n_NumberLike_co: TypeAlias = _ComplexLike_co\n_TD64Like_co: TypeAlias = int | np.timedelta64 | np.integer | np.bool\n# `_VoidLike_co` is technically not a scalar, but it's close enough\n_VoidLike_co: TypeAlias = tuple[Any, ...] | np.void\n_ScalarLike_co: TypeAlias = complex | str | bytes | np.generic\n
.venv\Lib\site-packages\numpy\_typing\_scalars.py
_scalars.py
Python
964
0.95
0
0.294118
python-kit
523
2023-12-05T00:13:43.842302
BSD-3-Clause
false
a5e9e02101b9aa47f3ebbbf20d9f3be1
from collections.abc import Sequence\nfrom typing import Any, SupportsIndex, TypeAlias\n\n_Shape: TypeAlias = tuple[int, ...]\n_AnyShape: TypeAlias = tuple[Any, ...]\n\n# Anything that can be coerced to a shape tuple\n_ShapeLike: TypeAlias = SupportsIndex | Sequence[SupportsIndex]\n
.venv\Lib\site-packages\numpy\_typing\_shape.py
_shape.py
Python
283
0.95
0
0.166667
python-kit
452
2024-02-06T19:25:17.331373
MIT
false
3b7203150d900c30f9202522f3e45058
from numpy import ufunc\n\n_UFunc_Nin1_Nout1 = ufunc\n_UFunc_Nin2_Nout1 = ufunc\n_UFunc_Nin1_Nout2 = ufunc\n_UFunc_Nin2_Nout2 = ufunc\n_GUFunc_Nin2_Nout1 = ufunc\n
.venv\Lib\site-packages\numpy\_typing\_ufunc.py
_ufunc.py
Python
163
0.85
0
0
node-utils
640
2023-11-03T18:22:39.461977
BSD-3-Clause
false
063579f8d9f087e3a7c4a7eec6103241
"""A module with private type-check-only `numpy.ufunc` subclasses.\n\nThe signatures of the ufuncs are too varied to reasonably type\nwith a single class. So instead, `ufunc` has been expanded into\nfour private subclasses, one for each combination of\n`~ufunc.nin` and `~ufunc.nout`.\n"""\n\nfrom typing import (\n Any,\n Generic,\n Literal,\n LiteralString,\n NoReturn,\n Protocol,\n SupportsIndex,\n TypeAlias,\n TypedDict,\n TypeVar,\n Unpack,\n overload,\n type_check_only,\n)\n\nimport numpy as np\nfrom numpy import _CastingKind, _OrderKACF, ufunc\nfrom numpy.typing import NDArray\n\nfrom ._array_like import ArrayLike, _ArrayLikeBool_co, _ArrayLikeInt_co\nfrom ._dtype_like import DTypeLike\nfrom ._scalars import _ScalarLike_co\nfrom ._shape import _ShapeLike\n\n_T = TypeVar("_T")\n_2Tuple: TypeAlias = tuple[_T, _T]\n_3Tuple: TypeAlias = tuple[_T, _T, _T]\n_4Tuple: TypeAlias = tuple[_T, _T, _T, _T]\n\n_2PTuple: TypeAlias = tuple[_T, _T, *tuple[_T, ...]]\n_3PTuple: TypeAlias = tuple[_T, _T, _T, *tuple[_T, ...]]\n_4PTuple: TypeAlias = tuple[_T, _T, _T, _T, *tuple[_T, ...]]\n\n_NTypes = TypeVar("_NTypes", bound=int, covariant=True)\n_IDType = TypeVar("_IDType", covariant=True)\n_NameType = TypeVar("_NameType", bound=LiteralString, covariant=True)\n_Signature = TypeVar("_Signature", bound=LiteralString, covariant=True)\n\n_NIn = TypeVar("_NIn", bound=int, covariant=True)\n_NOut = TypeVar("_NOut", bound=int, covariant=True)\n_ReturnType_co = TypeVar("_ReturnType_co", covariant=True)\n_ArrayT = TypeVar("_ArrayT", bound=np.ndarray[Any, Any])\n\n@type_check_only\nclass _SupportsArrayUFunc(Protocol):\n def __array_ufunc__(\n self,\n ufunc: ufunc,\n method: Literal["__call__", "reduce", "reduceat", "accumulate", "outer", "at"],\n *inputs: Any,\n **kwargs: Any,\n ) -> Any: ...\n\n@type_check_only\nclass _UFunc3Kwargs(TypedDict, total=False):\n where: _ArrayLikeBool_co | None\n casting: _CastingKind\n order: _OrderKACF\n subok: bool\n signature: _3Tuple[str | None] | str | 
None\n\n# NOTE: `reduce`, `accumulate`, `reduceat` and `outer` raise a ValueError for\n# ufuncs that don't accept two input arguments and return one output argument.\n# In such cases the respective methods return `NoReturn`\n\n# NOTE: Similarly, `at` won't be defined for ufuncs that return\n# multiple outputs; in such cases `at` is typed to return `NoReturn`\n\n# NOTE: If 2 output types are returned then `out` must be a\n# 2-tuple of arrays. Otherwise `None` or a plain array are also acceptable\n\n# pyright: reportIncompatibleMethodOverride=false\n\n@type_check_only\nclass _UFunc_Nin1_Nout1(ufunc, Generic[_NameType, _NTypes, _IDType]): # type: ignore[misc]\n @property\n def __name__(self) -> _NameType: ...\n @property\n def __qualname__(self) -> _NameType: ...\n @property\n def ntypes(self) -> _NTypes: ...\n @property\n def identity(self) -> _IDType: ...\n @property\n def nin(self) -> Literal[1]: ...\n @property\n def nout(self) -> Literal[1]: ...\n @property\n def nargs(self) -> Literal[2]: ...\n @property\n def signature(self) -> None: ...\n\n @overload\n def __call__(\n self,\n __x1: _ScalarLike_co,\n out: None = ...,\n *,\n where: _ArrayLikeBool_co | None = ...,\n casting: _CastingKind = ...,\n order: _OrderKACF = ...,\n dtype: DTypeLike = ...,\n subok: bool = ...,\n signature: str | _2Tuple[str | None] = ...,\n ) -> Any: ...\n @overload\n def __call__(\n self,\n __x1: ArrayLike,\n out: NDArray[Any] | tuple[NDArray[Any]] | None = ...,\n *,\n where: _ArrayLikeBool_co | None = ...,\n casting: _CastingKind = ...,\n order: _OrderKACF = ...,\n dtype: DTypeLike = ...,\n subok: bool = ...,\n signature: str | _2Tuple[str | None] = ...,\n ) -> NDArray[Any]: ...\n @overload\n def __call__(\n self,\n __x1: _SupportsArrayUFunc,\n out: NDArray[Any] | tuple[NDArray[Any]] | None = ...,\n *,\n where: _ArrayLikeBool_co | None = ...,\n casting: _CastingKind = ...,\n order: _OrderKACF = ...,\n dtype: DTypeLike = ...,\n subok: bool = ...,\n signature: str | _2Tuple[str | None] = 
...,\n ) -> Any: ...\n\n def at(\n self,\n a: _SupportsArrayUFunc,\n indices: _ArrayLikeInt_co,\n /,\n ) -> None: ...\n\n def reduce(self, *args, **kwargs) -> NoReturn: ...\n def accumulate(self, *args, **kwargs) -> NoReturn: ...\n def reduceat(self, *args, **kwargs) -> NoReturn: ...\n def outer(self, *args, **kwargs) -> NoReturn: ...\n\n@type_check_only\nclass _UFunc_Nin2_Nout1(ufunc, Generic[_NameType, _NTypes, _IDType]): # type: ignore[misc]\n @property\n def __name__(self) -> _NameType: ...\n @property\n def __qualname__(self) -> _NameType: ...\n @property\n def ntypes(self) -> _NTypes: ...\n @property\n def identity(self) -> _IDType: ...\n @property\n def nin(self) -> Literal[2]: ...\n @property\n def nout(self) -> Literal[1]: ...\n @property\n def nargs(self) -> Literal[3]: ...\n @property\n def signature(self) -> None: ...\n\n @overload # (scalar, scalar) -> scalar\n def __call__(\n self,\n x1: _ScalarLike_co,\n x2: _ScalarLike_co,\n /,\n out: None = None,\n *,\n dtype: DTypeLike | None = None,\n **kwds: Unpack[_UFunc3Kwargs],\n ) -> Any: ...\n @overload # (array-like, array) -> array\n def __call__(\n self,\n x1: ArrayLike,\n x2: NDArray[np.generic],\n /,\n out: NDArray[np.generic] | tuple[NDArray[np.generic]] | None = None,\n *,\n dtype: DTypeLike | None = None,\n **kwds: Unpack[_UFunc3Kwargs],\n ) -> NDArray[Any]: ...\n @overload # (array, array-like) -> array\n def __call__(\n self,\n x1: NDArray[np.generic],\n x2: ArrayLike,\n /,\n out: NDArray[np.generic] | tuple[NDArray[np.generic]] | None = None,\n *,\n dtype: DTypeLike | None = None,\n **kwds: Unpack[_UFunc3Kwargs],\n ) -> NDArray[Any]: ...\n @overload # (array-like, array-like, out=array) -> array\n def __call__(\n self,\n x1: ArrayLike,\n x2: ArrayLike,\n /,\n out: NDArray[np.generic] | tuple[NDArray[np.generic]],\n *,\n dtype: DTypeLike | None = None,\n **kwds: Unpack[_UFunc3Kwargs],\n ) -> NDArray[Any]: ...\n @overload # (array-like, array-like) -> array | scalar\n def __call__(\n self,\n x1: 
ArrayLike,\n x2: ArrayLike,\n /,\n out: NDArray[np.generic] | tuple[NDArray[np.generic]] | None = None,\n *,\n dtype: DTypeLike | None = None,\n **kwds: Unpack[_UFunc3Kwargs],\n ) -> NDArray[Any] | Any: ...\n\n def at(\n self,\n a: NDArray[Any],\n indices: _ArrayLikeInt_co,\n b: ArrayLike,\n /,\n ) -> None: ...\n\n def reduce(\n self,\n array: ArrayLike,\n axis: _ShapeLike | None = ...,\n dtype: DTypeLike = ...,\n out: NDArray[Any] | None = ...,\n keepdims: bool = ...,\n initial: Any = ...,\n where: _ArrayLikeBool_co = ...,\n ) -> Any: ...\n\n def accumulate(\n self,\n array: ArrayLike,\n axis: SupportsIndex = ...,\n dtype: DTypeLike = ...,\n out: NDArray[Any] | None = ...,\n ) -> NDArray[Any]: ...\n\n def reduceat(\n self,\n array: ArrayLike,\n indices: _ArrayLikeInt_co,\n axis: SupportsIndex = ...,\n dtype: DTypeLike = ...,\n out: NDArray[Any] | None = ...,\n ) -> NDArray[Any]: ...\n\n @overload # (scalar, scalar) -> scalar\n def outer(\n self,\n A: _ScalarLike_co,\n B: _ScalarLike_co,\n /,\n *,\n out: None = None,\n dtype: DTypeLike | None = None,\n **kwds: Unpack[_UFunc3Kwargs],\n ) -> Any: ...\n @overload # (array-like, array) -> array\n def outer(\n self,\n A: ArrayLike,\n B: NDArray[np.generic],\n /,\n *,\n out: NDArray[np.generic] | tuple[NDArray[np.generic]] | None = None,\n dtype: DTypeLike | None = None,\n **kwds: Unpack[_UFunc3Kwargs],\n ) -> NDArray[Any]: ...\n @overload # (array, array-like) -> array\n def outer(\n self,\n A: NDArray[np.generic],\n B: ArrayLike,\n /,\n *,\n out: NDArray[np.generic] | tuple[NDArray[np.generic]] | None = None,\n dtype: DTypeLike | None = None,\n **kwds: Unpack[_UFunc3Kwargs],\n ) -> NDArray[Any]: ...\n @overload # (array-like, array-like, out=array) -> array\n def outer(\n self,\n A: ArrayLike,\n B: ArrayLike,\n /,\n *,\n out: NDArray[np.generic] | tuple[NDArray[np.generic]],\n dtype: DTypeLike | None = None,\n **kwds: Unpack[_UFunc3Kwargs],\n ) -> NDArray[Any]: ...\n @overload # (array-like, array-like) -> array | 
scalar\n def outer(\n self,\n A: ArrayLike,\n B: ArrayLike,\n /,\n *,\n out: NDArray[np.generic] | tuple[NDArray[np.generic]] | None = None,\n dtype: DTypeLike | None = None,\n **kwds: Unpack[_UFunc3Kwargs],\n ) -> NDArray[Any] | Any: ...\n\n@type_check_only\nclass _UFunc_Nin1_Nout2(ufunc, Generic[_NameType, _NTypes, _IDType]): # type: ignore[misc]\n @property\n def __name__(self) -> _NameType: ...\n @property\n def __qualname__(self) -> _NameType: ...\n @property\n def ntypes(self) -> _NTypes: ...\n @property\n def identity(self) -> _IDType: ...\n @property\n def nin(self) -> Literal[1]: ...\n @property\n def nout(self) -> Literal[2]: ...\n @property\n def nargs(self) -> Literal[3]: ...\n @property\n def signature(self) -> None: ...\n\n @overload\n def __call__(\n self,\n __x1: _ScalarLike_co,\n __out1: None = ...,\n __out2: None = ...,\n *,\n where: _ArrayLikeBool_co | None = ...,\n casting: _CastingKind = ...,\n order: _OrderKACF = ...,\n dtype: DTypeLike = ...,\n subok: bool = ...,\n signature: str | _3Tuple[str | None] = ...,\n ) -> _2Tuple[Any]: ...\n @overload\n def __call__(\n self,\n __x1: ArrayLike,\n __out1: NDArray[Any] | None = ...,\n __out2: NDArray[Any] | None = ...,\n *,\n out: _2Tuple[NDArray[Any]] = ...,\n where: _ArrayLikeBool_co | None = ...,\n casting: _CastingKind = ...,\n order: _OrderKACF = ...,\n dtype: DTypeLike = ...,\n subok: bool = ...,\n signature: str | _3Tuple[str | None] = ...,\n ) -> _2Tuple[NDArray[Any]]: ...\n @overload\n def __call__(\n self,\n __x1: _SupportsArrayUFunc,\n __out1: NDArray[Any] | None = ...,\n __out2: NDArray[Any] | None = ...,\n *,\n out: _2Tuple[NDArray[Any]] = ...,\n where: _ArrayLikeBool_co | None = ...,\n casting: _CastingKind = ...,\n order: _OrderKACF = ...,\n dtype: DTypeLike = ...,\n subok: bool = ...,\n signature: str | _3Tuple[str | None] = ...,\n ) -> _2Tuple[Any]: ...\n\n def at(self, *args, **kwargs) -> NoReturn: ...\n def reduce(self, *args, **kwargs) -> NoReturn: ...\n def accumulate(self, *args, 
**kwargs) -> NoReturn: ...\n def reduceat(self, *args, **kwargs) -> NoReturn: ...\n def outer(self, *args, **kwargs) -> NoReturn: ...\n\n@type_check_only\nclass _UFunc_Nin2_Nout2(ufunc, Generic[_NameType, _NTypes, _IDType]): # type: ignore[misc]\n @property\n def __name__(self) -> _NameType: ...\n @property\n def __qualname__(self) -> _NameType: ...\n @property\n def ntypes(self) -> _NTypes: ...\n @property\n def identity(self) -> _IDType: ...\n @property\n def nin(self) -> Literal[2]: ...\n @property\n def nout(self) -> Literal[2]: ...\n @property\n def nargs(self) -> Literal[4]: ...\n @property\n def signature(self) -> None: ...\n\n @overload\n def __call__(\n self,\n __x1: _ScalarLike_co,\n __x2: _ScalarLike_co,\n __out1: None = ...,\n __out2: None = ...,\n *,\n where: _ArrayLikeBool_co | None = ...,\n casting: _CastingKind = ...,\n order: _OrderKACF = ...,\n dtype: DTypeLike = ...,\n subok: bool = ...,\n signature: str | _4Tuple[str | None] = ...,\n ) -> _2Tuple[Any]: ...\n @overload\n def __call__(\n self,\n __x1: ArrayLike,\n __x2: ArrayLike,\n __out1: NDArray[Any] | None = ...,\n __out2: NDArray[Any] | None = ...,\n *,\n out: _2Tuple[NDArray[Any]] = ...,\n where: _ArrayLikeBool_co | None = ...,\n casting: _CastingKind = ...,\n order: _OrderKACF = ...,\n dtype: DTypeLike = ...,\n subok: bool = ...,\n signature: str | _4Tuple[str | None] = ...,\n ) -> _2Tuple[NDArray[Any]]: ...\n\n def at(self, *args, **kwargs) -> NoReturn: ...\n def reduce(self, *args, **kwargs) -> NoReturn: ...\n def accumulate(self, *args, **kwargs) -> NoReturn: ...\n def reduceat(self, *args, **kwargs) -> NoReturn: ...\n def outer(self, *args, **kwargs) -> NoReturn: ...\n\n@type_check_only\nclass _GUFunc_Nin2_Nout1(ufunc, Generic[_NameType, _NTypes, _IDType, _Signature]): # type: ignore[misc]\n @property\n def __name__(self) -> _NameType: ...\n @property\n def __qualname__(self) -> _NameType: ...\n @property\n def ntypes(self) -> _NTypes: ...\n @property\n def identity(self) -> _IDType: 
...\n @property\n def nin(self) -> Literal[2]: ...\n @property\n def nout(self) -> Literal[1]: ...\n @property\n def nargs(self) -> Literal[3]: ...\n @property\n def signature(self) -> _Signature: ...\n\n # Scalar for 1D array-likes; ndarray otherwise\n @overload\n def __call__(\n self,\n __x1: ArrayLike,\n __x2: ArrayLike,\n out: None = ...,\n *,\n casting: _CastingKind = ...,\n order: _OrderKACF = ...,\n dtype: DTypeLike = ...,\n subok: bool = ...,\n signature: str | _3Tuple[str | None] = ...,\n axes: list[_2Tuple[SupportsIndex]] = ...,\n ) -> Any: ...\n @overload\n def __call__(\n self,\n __x1: ArrayLike,\n __x2: ArrayLike,\n out: NDArray[Any] | tuple[NDArray[Any]],\n *,\n casting: _CastingKind = ...,\n order: _OrderKACF = ...,\n dtype: DTypeLike = ...,\n subok: bool = ...,\n signature: str | _3Tuple[str | None] = ...,\n axes: list[_2Tuple[SupportsIndex]] = ...,\n ) -> NDArray[Any]: ...\n\n def at(self, *args, **kwargs) -> NoReturn: ...\n def reduce(self, *args, **kwargs) -> NoReturn: ...\n def accumulate(self, *args, **kwargs) -> NoReturn: ...\n def reduceat(self, *args, **kwargs) -> NoReturn: ...\n def outer(self, *args, **kwargs) -> NoReturn: ...\n\n@type_check_only\nclass _PyFunc_Kwargs_Nargs2(TypedDict, total=False):\n where: _ArrayLikeBool_co | None\n casting: _CastingKind\n order: _OrderKACF\n dtype: DTypeLike\n subok: bool\n signature: str | tuple[DTypeLike, DTypeLike]\n\n@type_check_only\nclass _PyFunc_Kwargs_Nargs3(TypedDict, total=False):\n where: _ArrayLikeBool_co | None\n casting: _CastingKind\n order: _OrderKACF\n dtype: DTypeLike\n subok: bool\n signature: str | tuple[DTypeLike, DTypeLike, DTypeLike]\n\n@type_check_only\nclass _PyFunc_Kwargs_Nargs3P(TypedDict, total=False):\n where: _ArrayLikeBool_co | None\n casting: _CastingKind\n order: _OrderKACF\n dtype: DTypeLike\n subok: bool\n signature: str | _3PTuple[DTypeLike]\n\n@type_check_only\nclass _PyFunc_Kwargs_Nargs4P(TypedDict, total=False):\n where: _ArrayLikeBool_co | None\n casting: 
_CastingKind\n order: _OrderKACF\n dtype: DTypeLike\n subok: bool\n signature: str | _4PTuple[DTypeLike]\n\n@type_check_only\nclass _PyFunc_Nin1_Nout1(ufunc, Generic[_ReturnType_co, _IDType]): # type: ignore[misc]\n @property\n def identity(self) -> _IDType: ...\n @property\n def nin(self) -> Literal[1]: ...\n @property\n def nout(self) -> Literal[1]: ...\n @property\n def nargs(self) -> Literal[2]: ...\n @property\n def ntypes(self) -> Literal[1]: ...\n @property\n def signature(self) -> None: ...\n\n @overload\n def __call__(\n self,\n x1: _ScalarLike_co,\n /,\n out: None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs2],\n ) -> _ReturnType_co: ...\n @overload\n def __call__(\n self,\n x1: ArrayLike,\n /,\n out: None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs2],\n ) -> _ReturnType_co | NDArray[np.object_]: ...\n @overload\n def __call__(\n self,\n x1: ArrayLike,\n /,\n out: _ArrayT | tuple[_ArrayT],\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs2],\n ) -> _ArrayT: ...\n @overload\n def __call__(\n self,\n x1: _SupportsArrayUFunc,\n /,\n out: NDArray[Any] | tuple[NDArray[Any]] | None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs2],\n ) -> Any: ...\n\n def at(self, a: _SupportsArrayUFunc, ixs: _ArrayLikeInt_co, /) -> None: ...\n def reduce(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n def accumulate(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n def reduceat(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n def outer(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n\n@type_check_only\nclass _PyFunc_Nin2_Nout1(ufunc, Generic[_ReturnType_co, _IDType]): # type: ignore[misc]\n @property\n def identity(self) -> _IDType: ...\n @property\n def nin(self) -> Literal[2]: ...\n @property\n def nout(self) -> Literal[1]: ...\n @property\n def nargs(self) -> Literal[3]: ...\n @property\n def ntypes(self) -> Literal[1]: ...\n @property\n def signature(self) -> None: ...\n\n @overload\n def __call__(\n self,\n x1: _ScalarLike_co,\n x2: _ScalarLike_co,\n 
/,\n out: None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3],\n ) -> _ReturnType_co: ...\n @overload\n def __call__(\n self,\n x1: ArrayLike,\n x2: ArrayLike,\n /,\n out: None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3],\n ) -> _ReturnType_co | NDArray[np.object_]: ...\n @overload\n def __call__(\n self,\n x1: ArrayLike,\n x2: ArrayLike,\n /,\n out: _ArrayT | tuple[_ArrayT],\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3],\n ) -> _ArrayT: ...\n @overload\n def __call__(\n self,\n x1: _SupportsArrayUFunc,\n x2: _SupportsArrayUFunc | ArrayLike,\n /,\n out: NDArray[Any] | tuple[NDArray[Any]] | None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3],\n ) -> Any: ...\n @overload\n def __call__(\n self,\n x1: ArrayLike,\n x2: _SupportsArrayUFunc,\n /,\n out: NDArray[Any] | tuple[NDArray[Any]] | None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3],\n ) -> Any: ...\n\n def at(self, a: _SupportsArrayUFunc, ixs: _ArrayLikeInt_co, b: ArrayLike, /) -> None: ...\n\n @overload\n def reduce(\n self,\n array: ArrayLike,\n axis: _ShapeLike | None,\n dtype: DTypeLike,\n out: _ArrayT,\n /,\n keepdims: bool = ...,\n initial: _ScalarLike_co = ...,\n where: _ArrayLikeBool_co = ...,\n ) -> _ArrayT: ...\n @overload\n def reduce(\n self,\n /,\n array: ArrayLike,\n axis: _ShapeLike | None = ...,\n dtype: DTypeLike = ...,\n *,\n out: _ArrayT | tuple[_ArrayT],\n keepdims: bool = ...,\n initial: _ScalarLike_co = ...,\n where: _ArrayLikeBool_co = ...,\n ) -> _ArrayT: ...\n @overload\n def reduce(\n self,\n /,\n array: ArrayLike,\n axis: _ShapeLike | None = ...,\n dtype: DTypeLike = ...,\n out: None = ...,\n *,\n keepdims: Literal[True],\n initial: _ScalarLike_co = ...,\n where: _ArrayLikeBool_co = ...,\n ) -> NDArray[np.object_]: ...\n @overload\n def reduce(\n self,\n /,\n array: ArrayLike,\n axis: _ShapeLike | None = ...,\n dtype: DTypeLike = ...,\n out: None = ...,\n keepdims: bool = ...,\n initial: _ScalarLike_co = ...,\n where: _ArrayLikeBool_co = ...,\n ) -> _ReturnType_co | 
NDArray[np.object_]: ...\n\n @overload\n def reduceat(\n self,\n array: ArrayLike,\n indices: _ArrayLikeInt_co,\n axis: SupportsIndex,\n dtype: DTypeLike,\n out: _ArrayT,\n /,\n ) -> _ArrayT: ...\n @overload\n def reduceat(\n self,\n /,\n array: ArrayLike,\n indices: _ArrayLikeInt_co,\n axis: SupportsIndex = ...,\n dtype: DTypeLike = ...,\n *,\n out: _ArrayT | tuple[_ArrayT],\n ) -> _ArrayT: ...\n @overload\n def reduceat(\n self,\n /,\n array: ArrayLike,\n indices: _ArrayLikeInt_co,\n axis: SupportsIndex = ...,\n dtype: DTypeLike = ...,\n out: None = ...,\n ) -> NDArray[np.object_]: ...\n @overload\n def reduceat(\n self,\n /,\n array: _SupportsArrayUFunc,\n indices: _ArrayLikeInt_co,\n axis: SupportsIndex = ...,\n dtype: DTypeLike = ...,\n out: NDArray[Any] | tuple[NDArray[Any]] | None = ...,\n ) -> Any: ...\n\n @overload\n def accumulate(\n self,\n array: ArrayLike,\n axis: SupportsIndex,\n dtype: DTypeLike,\n out: _ArrayT,\n /,\n ) -> _ArrayT: ...\n @overload\n def accumulate(\n self,\n array: ArrayLike,\n axis: SupportsIndex = ...,\n dtype: DTypeLike = ...,\n *,\n out: _ArrayT | tuple[_ArrayT],\n ) -> _ArrayT: ...\n @overload\n def accumulate(\n self,\n /,\n array: ArrayLike,\n axis: SupportsIndex = ...,\n dtype: DTypeLike = ...,\n out: None = ...,\n ) -> NDArray[np.object_]: ...\n\n @overload\n def outer(\n self,\n A: _ScalarLike_co,\n B: _ScalarLike_co,\n /, *,\n out: None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3],\n ) -> _ReturnType_co: ...\n @overload\n def outer(\n self,\n A: ArrayLike,\n B: ArrayLike,\n /, *,\n out: None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3],\n ) -> _ReturnType_co | NDArray[np.object_]: ...\n @overload\n def outer(\n self,\n A: ArrayLike,\n B: ArrayLike,\n /, *,\n out: _ArrayT,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3],\n ) -> _ArrayT: ...\n @overload\n def outer(\n self,\n A: _SupportsArrayUFunc,\n B: _SupportsArrayUFunc | ArrayLike,\n /, *,\n out: None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3],\n ) -> Any: 
...\n @overload\n def outer(\n self,\n A: _ScalarLike_co,\n B: _SupportsArrayUFunc | ArrayLike,\n /, *,\n out: None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3],\n ) -> Any: ...\n\n@type_check_only\nclass _PyFunc_Nin3P_Nout1(ufunc, Generic[_ReturnType_co, _IDType, _NIn]): # type: ignore[misc]\n @property\n def identity(self) -> _IDType: ...\n @property\n def nin(self) -> _NIn: ...\n @property\n def nout(self) -> Literal[1]: ...\n @property\n def ntypes(self) -> Literal[1]: ...\n @property\n def signature(self) -> None: ...\n\n @overload\n def __call__(\n self,\n x1: _ScalarLike_co,\n x2: _ScalarLike_co,\n x3: _ScalarLike_co,\n /,\n *xs: _ScalarLike_co,\n out: None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs4P],\n ) -> _ReturnType_co: ...\n @overload\n def __call__(\n self,\n x1: ArrayLike,\n x2: ArrayLike,\n x3: ArrayLike,\n /,\n *xs: ArrayLike,\n out: None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs4P],\n ) -> _ReturnType_co | NDArray[np.object_]: ...\n @overload\n def __call__(\n self,\n x1: ArrayLike,\n x2: ArrayLike,\n x3: ArrayLike,\n /,\n *xs: ArrayLike,\n out: _ArrayT | tuple[_ArrayT],\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs4P],\n ) -> _ArrayT: ...\n @overload\n def __call__(\n self,\n x1: _SupportsArrayUFunc | ArrayLike,\n x2: _SupportsArrayUFunc | ArrayLike,\n x3: _SupportsArrayUFunc | ArrayLike,\n /,\n *xs: _SupportsArrayUFunc | ArrayLike,\n out: NDArray[Any] | tuple[NDArray[Any]] | None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs4P],\n ) -> Any: ...\n\n def at(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n def reduce(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n def accumulate(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n def reduceat(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n def outer(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n\n@type_check_only\nclass _PyFunc_Nin1P_Nout2P(ufunc, Generic[_ReturnType_co, _IDType, _NIn, _NOut]): # type: ignore[misc]\n @property\n def identity(self) -> _IDType: 
...\n @property\n def nin(self) -> _NIn: ...\n @property\n def nout(self) -> _NOut: ...\n @property\n def ntypes(self) -> Literal[1]: ...\n @property\n def signature(self) -> None: ...\n\n @overload\n def __call__(\n self,\n x1: _ScalarLike_co,\n /,\n *xs: _ScalarLike_co,\n out: None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3P],\n ) -> _2PTuple[_ReturnType_co]: ...\n @overload\n def __call__(\n self,\n x1: ArrayLike,\n /,\n *xs: ArrayLike,\n out: None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3P],\n ) -> _2PTuple[_ReturnType_co | NDArray[np.object_]]: ...\n @overload\n def __call__(\n self,\n x1: ArrayLike,\n /,\n *xs: ArrayLike,\n out: _2PTuple[_ArrayT],\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3P],\n ) -> _2PTuple[_ArrayT]: ...\n @overload\n def __call__(\n self,\n x1: _SupportsArrayUFunc | ArrayLike,\n /,\n *xs: _SupportsArrayUFunc | ArrayLike,\n out: _2PTuple[NDArray[Any]] | None = ...,\n **kwargs: Unpack[_PyFunc_Kwargs_Nargs3P],\n ) -> Any: ...\n\n def at(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n def reduce(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n def accumulate(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n def reduceat(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n def outer(self, /, *args: Any, **kwargs: Any) -> NoReturn: ...\n
.venv\Lib\site-packages\numpy\_typing\_ufunc.pyi
_ufunc.pyi
Other
27,516
0.95
0.187035
0.084555
node-utils
734
2024-06-26T10:15:26.278946
Apache-2.0
false
0f349f1f5f214a2cd4272dc9a5931939
"""Private counterpart of ``numpy.typing``."""\n\nfrom ._array_like import ArrayLike as ArrayLike\nfrom ._array_like import NDArray as NDArray\nfrom ._array_like import _ArrayLike as _ArrayLike\nfrom ._array_like import _ArrayLikeAnyString_co as _ArrayLikeAnyString_co\nfrom ._array_like import _ArrayLikeBool_co as _ArrayLikeBool_co\nfrom ._array_like import _ArrayLikeBytes_co as _ArrayLikeBytes_co\nfrom ._array_like import _ArrayLikeComplex128_co as _ArrayLikeComplex128_co\nfrom ._array_like import _ArrayLikeComplex_co as _ArrayLikeComplex_co\nfrom ._array_like import _ArrayLikeDT64_co as _ArrayLikeDT64_co\nfrom ._array_like import _ArrayLikeFloat64_co as _ArrayLikeFloat64_co\nfrom ._array_like import _ArrayLikeFloat_co as _ArrayLikeFloat_co\nfrom ._array_like import _ArrayLikeInt as _ArrayLikeInt\nfrom ._array_like import _ArrayLikeInt_co as _ArrayLikeInt_co\nfrom ._array_like import _ArrayLikeNumber_co as _ArrayLikeNumber_co\nfrom ._array_like import _ArrayLikeObject_co as _ArrayLikeObject_co\nfrom ._array_like import _ArrayLikeStr_co as _ArrayLikeStr_co\nfrom ._array_like import _ArrayLikeString_co as _ArrayLikeString_co\nfrom ._array_like import _ArrayLikeTD64_co as _ArrayLikeTD64_co\nfrom ._array_like import _ArrayLikeUInt_co as _ArrayLikeUInt_co\nfrom ._array_like import _ArrayLikeVoid_co as _ArrayLikeVoid_co\nfrom ._array_like import _FiniteNestedSequence as _FiniteNestedSequence\nfrom ._array_like import _SupportsArray as _SupportsArray\nfrom ._array_like import _SupportsArrayFunc as _SupportsArrayFunc\n\n#\nfrom ._char_codes import _BoolCodes as _BoolCodes\nfrom ._char_codes import _ByteCodes as _ByteCodes\nfrom ._char_codes import _BytesCodes as _BytesCodes\nfrom ._char_codes import _CDoubleCodes as _CDoubleCodes\nfrom ._char_codes import _CharacterCodes as _CharacterCodes\nfrom ._char_codes import _CLongDoubleCodes as _CLongDoubleCodes\nfrom ._char_codes import _Complex64Codes as _Complex64Codes\nfrom ._char_codes import _Complex128Codes as 
_Complex128Codes\nfrom ._char_codes import _ComplexFloatingCodes as _ComplexFloatingCodes\nfrom ._char_codes import _CSingleCodes as _CSingleCodes\nfrom ._char_codes import _DoubleCodes as _DoubleCodes\nfrom ._char_codes import _DT64Codes as _DT64Codes\nfrom ._char_codes import _FlexibleCodes as _FlexibleCodes\nfrom ._char_codes import _Float16Codes as _Float16Codes\nfrom ._char_codes import _Float32Codes as _Float32Codes\nfrom ._char_codes import _Float64Codes as _Float64Codes\nfrom ._char_codes import _FloatingCodes as _FloatingCodes\nfrom ._char_codes import _GenericCodes as _GenericCodes\nfrom ._char_codes import _HalfCodes as _HalfCodes\nfrom ._char_codes import _InexactCodes as _InexactCodes\nfrom ._char_codes import _Int8Codes as _Int8Codes\nfrom ._char_codes import _Int16Codes as _Int16Codes\nfrom ._char_codes import _Int32Codes as _Int32Codes\nfrom ._char_codes import _Int64Codes as _Int64Codes\nfrom ._char_codes import _IntCCodes as _IntCCodes\nfrom ._char_codes import _IntCodes as _IntCodes\nfrom ._char_codes import _IntegerCodes as _IntegerCodes\nfrom ._char_codes import _IntPCodes as _IntPCodes\nfrom ._char_codes import _LongCodes as _LongCodes\nfrom ._char_codes import _LongDoubleCodes as _LongDoubleCodes\nfrom ._char_codes import _LongLongCodes as _LongLongCodes\nfrom ._char_codes import _NumberCodes as _NumberCodes\nfrom ._char_codes import _ObjectCodes as _ObjectCodes\nfrom ._char_codes import _ShortCodes as _ShortCodes\nfrom ._char_codes import _SignedIntegerCodes as _SignedIntegerCodes\nfrom ._char_codes import _SingleCodes as _SingleCodes\nfrom ._char_codes import _StrCodes as _StrCodes\nfrom ._char_codes import _StringCodes as _StringCodes\nfrom ._char_codes import _TD64Codes as _TD64Codes\nfrom ._char_codes import _UByteCodes as _UByteCodes\nfrom ._char_codes import _UInt8Codes as _UInt8Codes\nfrom ._char_codes import _UInt16Codes as _UInt16Codes\nfrom ._char_codes import _UInt32Codes as _UInt32Codes\nfrom ._char_codes import _UInt64Codes as 
_UInt64Codes\nfrom ._char_codes import _UIntCCodes as _UIntCCodes\nfrom ._char_codes import _UIntCodes as _UIntCodes\nfrom ._char_codes import _UIntPCodes as _UIntPCodes\nfrom ._char_codes import _ULongCodes as _ULongCodes\nfrom ._char_codes import _ULongLongCodes as _ULongLongCodes\nfrom ._char_codes import _UnsignedIntegerCodes as _UnsignedIntegerCodes\nfrom ._char_codes import _UShortCodes as _UShortCodes\nfrom ._char_codes import _VoidCodes as _VoidCodes\n\n#\nfrom ._dtype_like import DTypeLike as DTypeLike\nfrom ._dtype_like import _DTypeLike as _DTypeLike\nfrom ._dtype_like import _DTypeLikeBool as _DTypeLikeBool\nfrom ._dtype_like import _DTypeLikeBytes as _DTypeLikeBytes\nfrom ._dtype_like import _DTypeLikeComplex as _DTypeLikeComplex\nfrom ._dtype_like import _DTypeLikeComplex_co as _DTypeLikeComplex_co\nfrom ._dtype_like import _DTypeLikeDT64 as _DTypeLikeDT64\nfrom ._dtype_like import _DTypeLikeFloat as _DTypeLikeFloat\nfrom ._dtype_like import _DTypeLikeInt as _DTypeLikeInt\nfrom ._dtype_like import _DTypeLikeObject as _DTypeLikeObject\nfrom ._dtype_like import _DTypeLikeStr as _DTypeLikeStr\nfrom ._dtype_like import _DTypeLikeTD64 as _DTypeLikeTD64\nfrom ._dtype_like import _DTypeLikeUInt as _DTypeLikeUInt\nfrom ._dtype_like import _DTypeLikeVoid as _DTypeLikeVoid\nfrom ._dtype_like import _SupportsDType as _SupportsDType\nfrom ._dtype_like import _VoidDTypeLike as _VoidDTypeLike\n\n#\nfrom ._nbit import _NBitByte as _NBitByte\nfrom ._nbit import _NBitDouble as _NBitDouble\nfrom ._nbit import _NBitHalf as _NBitHalf\nfrom ._nbit import _NBitInt as _NBitInt\nfrom ._nbit import _NBitIntC as _NBitIntC\nfrom ._nbit import _NBitIntP as _NBitIntP\nfrom ._nbit import _NBitLong as _NBitLong\nfrom ._nbit import _NBitLongDouble as _NBitLongDouble\nfrom ._nbit import _NBitLongLong as _NBitLongLong\nfrom ._nbit import _NBitShort as _NBitShort\nfrom ._nbit import _NBitSingle as _NBitSingle\n\n#\nfrom ._nbit_base import (\n NBitBase as NBitBase, # type: 
ignore[deprecated] # pyright: ignore[reportDeprecated]\n)\nfrom ._nbit_base import _8Bit as _8Bit\nfrom ._nbit_base import _16Bit as _16Bit\nfrom ._nbit_base import _32Bit as _32Bit\nfrom ._nbit_base import _64Bit as _64Bit\nfrom ._nbit_base import _96Bit as _96Bit\nfrom ._nbit_base import _128Bit as _128Bit\n\n#\nfrom ._nested_sequence import _NestedSequence as _NestedSequence\n\n#\nfrom ._scalars import _BoolLike_co as _BoolLike_co\nfrom ._scalars import _CharLike_co as _CharLike_co\nfrom ._scalars import _ComplexLike_co as _ComplexLike_co\nfrom ._scalars import _FloatLike_co as _FloatLike_co\nfrom ._scalars import _IntLike_co as _IntLike_co\nfrom ._scalars import _NumberLike_co as _NumberLike_co\nfrom ._scalars import _ScalarLike_co as _ScalarLike_co\nfrom ._scalars import _TD64Like_co as _TD64Like_co\nfrom ._scalars import _UIntLike_co as _UIntLike_co\nfrom ._scalars import _VoidLike_co as _VoidLike_co\n\n#\nfrom ._shape import _AnyShape as _AnyShape\nfrom ._shape import _Shape as _Shape\nfrom ._shape import _ShapeLike as _ShapeLike\n\n#\nfrom ._ufunc import _GUFunc_Nin2_Nout1 as _GUFunc_Nin2_Nout1\nfrom ._ufunc import _UFunc_Nin1_Nout1 as _UFunc_Nin1_Nout1\nfrom ._ufunc import _UFunc_Nin1_Nout2 as _UFunc_Nin1_Nout2\nfrom ._ufunc import _UFunc_Nin2_Nout1 as _UFunc_Nin2_Nout1\nfrom ._ufunc import _UFunc_Nin2_Nout2 as _UFunc_Nin2_Nout2\n
.venv\Lib\site-packages\numpy\_typing\__init__.py
__init__.py
Python
7,336
0.95
0
0.057554
awesome-app
484
2023-12-29T11:52:14.578413
BSD-3-Clause
false
8b05458456083f31a3c76afcf71d7247
\n\n
.venv\Lib\site-packages\numpy\_typing\__pycache__\_add_docstring.cpython-313.pyc
_add_docstring.cpython-313.pyc
Other
4,781
0.95
0.077519
0.06
python-kit
203
2024-07-10T02:26:43.777375
MIT
false
bb3c8dce793e6999a175f3299db2d159
\n\n
.venv\Lib\site-packages\numpy\_typing\__pycache__\_array_like.cpython-313.pyc
_array_like.cpython-313.pyc
Other
5,914
0.95
0.025974
0.013514
vue-tools
124
2025-05-04T23:05:11.072614
BSD-3-Clause
false
cba796657c02054f819b21b42353c937
\n\n
.venv\Lib\site-packages\numpy\_typing\__pycache__\_char_codes.cpython-313.pyc
_char_codes.cpython-313.pyc
Other
7,073
0.8
0
0
react-lib
867
2025-03-11T23:55:56.932516
GPL-3.0
false
22411f9d91fffb9f3c5a5e5be767c899
\n\n
.venv\Lib\site-packages\numpy\_typing\__pycache__\_dtype_like.cpython-313.pyc
_dtype_like.cpython-313.pyc
Other
4,015
0.8
0
0
react-lib
875
2024-01-28T07:15:26.378220
BSD-3-Clause
false
2d7754d872d701f06fce27ca4c821c10
\n\n
.venv\Lib\site-packages\numpy\_typing\__pycache__\_extended_precision.cpython-313.pyc
_extended_precision.cpython-313.pyc
Other
782
0.85
0
0
vue-tools
110
2023-08-21T03:34:34.667358
Apache-2.0
false
ccc642caaef952efbe948fe84b04a7c1
\n\n
.venv\Lib\site-packages\numpy\_typing\__pycache__\_nbit.cpython-313.pyc
_nbit.cpython-313.pyc
Other
964
0.8
0
0
react-lib
990
2023-10-10T00:14:11.552142
BSD-3-Clause
false
1ee383cc3f64c7f6c685618136e020f7
\n\n
.venv\Lib\site-packages\numpy\_typing\__pycache__\_nbit_base.cpython-313.pyc
_nbit_base.cpython-313.pyc
Other
3,926
0.95
0.101449
0
python-kit
514
2024-02-17T21:57:13.255581
Apache-2.0
false
8f63aaed1089addf6337f9926b58ab5d
\n\n
.venv\Lib\site-packages\numpy\_typing\__pycache__\_nested_sequence.cpython-313.pyc
_nested_sequence.cpython-313.pyc
Other
3,373
0.95
0.09434
0.022727
node-utils
607
2025-04-30T12:46:11.268514
BSD-3-Clause
false
25c47c560ff4d54792b07f8f6e9cecd1
\n\n
.venv\Lib\site-packages\numpy\_typing\__pycache__\_scalars.cpython-313.pyc
_scalars.cpython-313.pyc
Other
1,424
0.8
0
0
awesome-app
354
2025-07-06T08:37:51.698070
GPL-3.0
false
aa3e2029fca6651acaed6fe7376bd236
\n\n
.venv\Lib\site-packages\numpy\_typing\__pycache__\_shape.cpython-313.pyc
_shape.cpython-313.pyc
Other
529
0.8
0
0
awesome-app
357
2023-10-14T11:57:08.678756
BSD-3-Clause
false
f0081db4b0ff1e80d466e3703ef17d3f
\n\n
.venv\Lib\site-packages\numpy\_typing\__pycache__\_ufunc.cpython-313.pyc
_ufunc.cpython-313.pyc
Other
370
0.7
0
0
vue-tools
679
2024-12-15T22:55:15.733239
MIT
false
c9a91ca33b59c5818aae9f1098ceffd7
\n\n
.venv\Lib\site-packages\numpy\_typing\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
5,111
0.8
0
0
awesome-app
881
2024-12-01T01:59:42.557623
Apache-2.0
false
438c899c6d3a8a45cccaec50ab61e0d0
"""\nA set of methods retained from np.compat module that\nare still used across codebase.\n"""\n\n__all__ = ["asunicode", "asbytes"]\n\n\ndef asunicode(s):\n if isinstance(s, bytes):\n return s.decode('latin1')\n return str(s)\n\n\ndef asbytes(s):\n if isinstance(s, bytes):\n return s\n return str(s).encode('latin1')\n
.venv\Lib\site-packages\numpy\_utils\_convertions.py
_convertions.py
Python
347
0.85
0.222222
0
node-utils
459
2025-05-03T03:28:27.359718
BSD-3-Clause
false
caabffb8dcb48b4119d613081234202d
__all__ = ["asbytes", "asunicode"]\n\ndef asunicode(s: bytes | str) -> str: ...\ndef asbytes(s: bytes | str) -> str: ...\n
.venv\Lib\site-packages\numpy\_utils\_convertions.pyi
_convertions.pyi
Other
122
0.85
0.5
0
vue-tools
227
2023-12-02T06:11:57.952350
BSD-3-Clause
false
75c2b93630c612beb2db94cc2390bc31
"""Subset of inspect module from upstream python\n\nWe use this instead of upstream because upstream inspect is slow to import, and\nsignificantly contributes to numpy import times. Importing this copy has almost\nno overhead.\n\n"""\nimport types\n\n__all__ = ['getargspec', 'formatargspec']\n\n# ----------------------------------------------------------- type-checking\ndef ismethod(object):\n """Return true if the object is an instance method.\n\n Instance method objects provide these attributes:\n __doc__ documentation string\n __name__ name with which this method was defined\n im_class class object in which this method belongs\n im_func function object containing implementation of method\n im_self instance to which this method is bound, or None\n\n """\n return isinstance(object, types.MethodType)\n\ndef isfunction(object):\n """Return true if the object is a user-defined function.\n\n Function objects provide these attributes:\n __doc__ documentation string\n __name__ name with which this function was defined\n func_code code object containing compiled function bytecode\n func_defaults tuple of any default values for arguments\n func_doc (same as __doc__)\n func_globals global namespace in which this function was defined\n func_name (same as __name__)\n\n """\n return isinstance(object, types.FunctionType)\n\ndef iscode(object):\n """Return true if the object is a code object.\n\n Code objects provide these attributes:\n co_argcount number of arguments (not including * or ** args)\n co_code string of raw compiled bytecode\n co_consts tuple of constants used in the bytecode\n co_filename name of file in which this code object was created\n co_firstlineno number of first line in Python source code\n co_flags bitmap: 1=optimized | 2=newlocals | 4=*arg | 8=**arg\n co_lnotab encoded mapping of line numbers to bytecode indices\n co_name name with which this code object was defined\n co_names tuple of names of local variables\n co_nlocals number of local variables\n 
co_stacksize virtual machine stack space required\n co_varnames tuple of names of arguments and local variables\n\n """\n return isinstance(object, types.CodeType)\n\n\n# ------------------------------------------------ argument list extraction\n# These constants are from Python's compile.h.\nCO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS = 1, 2, 4, 8\n\ndef getargs(co):\n """Get information about the arguments accepted by a code object.\n\n Three things are returned: (args, varargs, varkw), where 'args' is\n a list of argument names (possibly containing nested lists), and\n 'varargs' and 'varkw' are the names of the * and ** arguments or None.\n\n """\n\n if not iscode(co):\n raise TypeError('arg is not a code object')\n\n nargs = co.co_argcount\n names = co.co_varnames\n args = list(names[:nargs])\n\n # The following acrobatics are for anonymous (tuple) arguments.\n # Which we do not need to support, so remove to avoid importing\n # the dis module.\n for i in range(nargs):\n if args[i][:1] in ['', '.']:\n raise TypeError("tuple function arguments are not supported")\n varargs = None\n if co.co_flags & CO_VARARGS:\n varargs = co.co_varnames[nargs]\n nargs = nargs + 1\n varkw = None\n if co.co_flags & CO_VARKEYWORDS:\n varkw = co.co_varnames[nargs]\n return args, varargs, varkw\n\ndef getargspec(func):\n """Get the names and default values of a function's arguments.\n\n A tuple of four things is returned: (args, varargs, varkw, defaults).\n 'args' is a list of the argument names (it may contain nested lists).\n 'varargs' and 'varkw' are the names of the * and ** arguments or None.\n 'defaults' is an n-tuple of the default values of the last n arguments.\n\n """\n\n if ismethod(func):\n func = func.__func__\n if not isfunction(func):\n raise TypeError('arg is not a Python function')\n args, varargs, varkw = getargs(func.__code__)\n return args, varargs, varkw, func.__defaults__\n\ndef getargvalues(frame):\n """Get information about arguments passed into a 
particular frame.\n\n A tuple of four things is returned: (args, varargs, varkw, locals).\n 'args' is a list of the argument names (it may contain nested lists).\n 'varargs' and 'varkw' are the names of the * and ** arguments or None.\n 'locals' is the locals dictionary of the given frame.\n\n """\n args, varargs, varkw = getargs(frame.f_code)\n return args, varargs, varkw, frame.f_locals\n\ndef joinseq(seq):\n if len(seq) == 1:\n return '(' + seq[0] + ',)'\n else:\n return '(' + ', '.join(seq) + ')'\n\ndef strseq(object, convert, join=joinseq):\n """Recursively walk a sequence, stringifying each element.\n\n """\n if type(object) in [list, tuple]:\n return join([strseq(_o, convert, join) for _o in object])\n else:\n return convert(object)\n\ndef formatargspec(args, varargs=None, varkw=None, defaults=None,\n formatarg=str,\n formatvarargs=lambda name: '*' + name,\n formatvarkw=lambda name: '**' + name,\n formatvalue=lambda value: '=' + repr(value),\n join=joinseq):\n """Format an argument spec from the 4 values returned by getargspec.\n\n The first four arguments are (args, varargs, varkw, defaults). The\n other four arguments are the corresponding optional formatting functions\n that are called to turn names and values into strings. 
The ninth\n argument is an optional function to format the sequence of arguments.\n\n """\n specs = []\n if defaults:\n firstdefault = len(args) - len(defaults)\n for i in range(len(args)):\n spec = strseq(args[i], formatarg, join)\n if defaults and i >= firstdefault:\n spec = spec + formatvalue(defaults[i - firstdefault])\n specs.append(spec)\n if varargs is not None:\n specs.append(formatvarargs(varargs))\n if varkw is not None:\n specs.append(formatvarkw(varkw))\n return '(' + ', '.join(specs) + ')'\n\ndef formatargvalues(args, varargs, varkw, locals,\n formatarg=str,\n formatvarargs=lambda name: '*' + name,\n formatvarkw=lambda name: '**' + name,\n formatvalue=lambda value: '=' + repr(value),\n join=joinseq):\n """Format an argument spec from the 4 values returned by getargvalues.\n\n The first four arguments are (args, varargs, varkw, locals). The\n next four arguments are the corresponding optional formatting functions\n that are called to turn names and values into strings. The ninth\n argument is an optional function to format the sequence of arguments.\n\n """\n def convert(name, locals=locals,\n formatarg=formatarg, formatvalue=formatvalue):\n return formatarg(name) + formatvalue(locals[name])\n specs = [strseq(arg, convert, join) for arg in args]\n\n if varargs:\n specs.append(formatvarargs(varargs) + formatvalue(locals[varargs]))\n if varkw:\n specs.append(formatvarkw(varkw) + formatvalue(locals[varkw]))\n return '(' + ', '.join(specs) + ')'\n
.venv\Lib\site-packages\numpy\_utils\_inspect.py
_inspect.py
Python
7,628
0.95
0.234375
0.03871
python-kit
926
2024-06-20T22:57:58.753347
GPL-3.0
false
2ca14450f94948a43d035e1e2c9b5fec
import types\nfrom collections.abc import Callable, Mapping\nfrom typing import Any, Final, TypeAlias, TypeVar, overload\n\nfrom _typeshed import SupportsLenAndGetItem\nfrom typing_extensions import TypeIs\n\n__all__ = ["formatargspec", "getargspec"]\n\n###\n\n_T = TypeVar("_T")\n_RT = TypeVar("_RT")\n\n_StrSeq: TypeAlias = SupportsLenAndGetItem[str]\n_NestedSeq: TypeAlias = list[_T | _NestedSeq[_T]] | tuple[_T | _NestedSeq[_T], ...]\n\n_JoinFunc: TypeAlias = Callable[[list[_T]], _T]\n_FormatFunc: TypeAlias = Callable[[_T], str]\n\n###\n\nCO_OPTIMIZED: Final = 1\nCO_NEWLOCALS: Final = 2\nCO_VARARGS: Final = 4\nCO_VARKEYWORDS: Final = 8\n\n###\n\ndef ismethod(object: object) -> TypeIs[types.MethodType]: ...\ndef isfunction(object: object) -> TypeIs[types.FunctionType]: ...\ndef iscode(object: object) -> TypeIs[types.CodeType]: ...\n\n###\n\ndef getargs(co: types.CodeType) -> tuple[list[str], str | None, str | None]: ...\ndef getargspec(func: types.MethodType | types.FunctionType) -> tuple[list[str], str | None, str | None, tuple[Any, ...]]: ...\ndef getargvalues(frame: types.FrameType) -> tuple[list[str], str | None, str | None, dict[str, Any]]: ...\n\n#\ndef joinseq(seq: _StrSeq) -> str: ...\n\n#\n@overload\ndef strseq(object: _NestedSeq[str], convert: Callable[[Any], Any], join: _JoinFunc[str] = ...) 
-> str: ...\n@overload\ndef strseq(object: _NestedSeq[_T], convert: Callable[[_T], _RT], join: _JoinFunc[_RT]) -> _RT: ...\n\n#\ndef formatargspec(\n args: _StrSeq,\n varargs: str | None = None,\n varkw: str | None = None,\n defaults: SupportsLenAndGetItem[object] | None = None,\n formatarg: _FormatFunc[str] = ..., # str\n formatvarargs: _FormatFunc[str] = ..., # "*{}".format\n formatvarkw: _FormatFunc[str] = ..., # "**{}".format\n formatvalue: _FormatFunc[object] = ..., # "={!r}".format\n join: _JoinFunc[str] = ..., # joinseq\n) -> str: ...\ndef formatargvalues(\n args: _StrSeq,\n varargs: str | None,\n varkw: str | None,\n locals: Mapping[str, object] | None,\n formatarg: _FormatFunc[str] = ..., # str\n formatvarargs: _FormatFunc[str] = ..., # "*{}".format\n formatvarkw: _FormatFunc[str] = ..., # "**{}".format\n formatvalue: _FormatFunc[object] = ..., # "={!r}".format\n join: _JoinFunc[str] = ..., # joinseq\n) -> str: ...\n
.venv\Lib\site-packages\numpy\_utils\_inspect.pyi
_inspect.pyi
Other
2,326
0.95
0.15493
0.125
react-lib
998
2024-06-25T10:48:37.986847
Apache-2.0
false
02c064d55111fa81115c30e23273ed68
"""Utility to compare pep440 compatible version strings.\n\nThe LooseVersion and StrictVersion classes that distutils provides don't\nwork; they don't recognize anything like alpha/beta/rc/dev versions.\n"""\n\n# Copyright (c) Donald Stufft and individual contributors.\n# All rights reserved.\n\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n\n# 1. Redistributions of source code must retain the above copyright notice,\n# this list of conditions and the following disclaimer.\n\n# 2. Redistributions in binary form must reproduce the above copyright\n# notice, this list of conditions and the following disclaimer in the\n# documentation and/or other materials provided with the distribution.\n\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n# POSSIBILITY OF SUCH DAMAGE.\n\nimport collections\nimport itertools\nimport re\n\n__all__ = [\n "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN",\n]\n\n\n# BEGIN packaging/_structures.py\n\n\nclass Infinity:\n def __repr__(self):\n return "Infinity"\n\n def __hash__(self):\n return hash(repr(self))\n\n def __lt__(self, other):\n return False\n\n def __le__(self, other):\n return False\n\n def __eq__(self, other):\n return isinstance(other, self.__class__)\n\n def __ne__(self, other):\n return not isinstance(other, self.__class__)\n\n def __gt__(self, other):\n return True\n\n def __ge__(self, other):\n return True\n\n def __neg__(self):\n return NegativeInfinity\n\n\nInfinity = Infinity()\n\n\nclass NegativeInfinity:\n def __repr__(self):\n return "-Infinity"\n\n def __hash__(self):\n return hash(repr(self))\n\n def __lt__(self, other):\n return True\n\n def __le__(self, other):\n return True\n\n def __eq__(self, other):\n return isinstance(other, self.__class__)\n\n def __ne__(self, other):\n return not isinstance(other, self.__class__)\n\n def __gt__(self, other):\n return False\n\n def __ge__(self, other):\n return False\n\n def __neg__(self):\n return Infinity\n\n\n# BEGIN packaging/version.py\n\n\nNegativeInfinity = NegativeInfinity()\n\n_Version = collections.namedtuple(\n "_Version",\n ["epoch", "release", "dev", "pre", "post", "local"],\n)\n\n\ndef parse(version):\n """\n Parse the given version string and return either a 
:class:`Version` object\n or a :class:`LegacyVersion` object depending on if the given version is\n a valid PEP 440 version or a legacy version.\n """\n try:\n return Version(version)\n except InvalidVersion:\n return LegacyVersion(version)\n\n\nclass InvalidVersion(ValueError):\n """\n An invalid version was found, users should refer to PEP 440.\n """\n\n\nclass _BaseVersion:\n\n def __hash__(self):\n return hash(self._key)\n\n def __lt__(self, other):\n return self._compare(other, lambda s, o: s < o)\n\n def __le__(self, other):\n return self._compare(other, lambda s, o: s <= o)\n\n def __eq__(self, other):\n return self._compare(other, lambda s, o: s == o)\n\n def __ge__(self, other):\n return self._compare(other, lambda s, o: s >= o)\n\n def __gt__(self, other):\n return self._compare(other, lambda s, o: s > o)\n\n def __ne__(self, other):\n return self._compare(other, lambda s, o: s != o)\n\n def _compare(self, other, method):\n if not isinstance(other, _BaseVersion):\n return NotImplemented\n\n return method(self._key, other._key)\n\n\nclass LegacyVersion(_BaseVersion):\n\n def __init__(self, version):\n self._version = str(version)\n self._key = _legacy_cmpkey(self._version)\n\n def __str__(self):\n return self._version\n\n def __repr__(self):\n return f"<LegacyVersion({str(self)!r})>"\n\n @property\n def public(self):\n return self._version\n\n @property\n def base_version(self):\n return self._version\n\n @property\n def local(self):\n return None\n\n @property\n def is_prerelease(self):\n return False\n\n @property\n def is_postrelease(self):\n return False\n\n\n_legacy_version_component_re = re.compile(\n r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,\n)\n\n_legacy_version_replacement_map = {\n "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",\n}\n\n\ndef _parse_version_parts(s):\n for part in _legacy_version_component_re.split(s):\n part = _legacy_version_replacement_map.get(part, part)\n\n if not part or part == ".":\n continue\n\n if part[:1] 
in "0123456789":\n # pad for numeric comparison\n yield part.zfill(8)\n else:\n yield "*" + part\n\n # ensure that alpha/beta/candidate are before final\n yield "*final"\n\n\ndef _legacy_cmpkey(version):\n # We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch\n # greater than or equal to 0. This will effectively put the LegacyVersion,\n # which uses the defacto standard originally implemented by setuptools,\n # as before all PEP 440 versions.\n epoch = -1\n\n # This scheme is taken from pkg_resources.parse_version setuptools prior to\n # its adoption of the packaging library.\n parts = []\n for part in _parse_version_parts(version.lower()):\n if part.startswith("*"):\n # remove "-" before a prerelease tag\n if part < "*final":\n while parts and parts[-1] == "*final-":\n parts.pop()\n\n # remove trailing zeros from each series of numeric parts\n while parts and parts[-1] == "00000000":\n parts.pop()\n\n parts.append(part)\n parts = tuple(parts)\n\n return epoch, parts\n\n\n# Deliberately not anchored to the start and end of the string, to make it\n# easier for 3rd party code to reuse\nVERSION_PATTERN = r"""\n v?\n (?:\n (?:(?P<epoch>[0-9]+)!)? # epoch\n (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment\n (?P<pre> # pre-release\n [-_\.]?\n (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))\n [-_\.]?\n (?P<pre_n>[0-9]+)?\n )?\n (?P<post> # post release\n (?:-(?P<post_n1>[0-9]+))\n |\n (?:\n [-_\.]?\n (?P<post_l>post|rev|r)\n [-_\.]?\n (?P<post_n2>[0-9]+)?\n )\n )?\n (?P<dev> # dev release\n [-_\.]?\n (?P<dev_l>dev)\n [-_\.]?\n (?P<dev_n>[0-9]+)?\n )?\n )\n (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? 
# local version\n"""\n\n\nclass Version(_BaseVersion):\n\n _regex = re.compile(\n r"^\s*" + VERSION_PATTERN + r"\s*$",\n re.VERBOSE | re.IGNORECASE,\n )\n\n def __init__(self, version):\n # Validate the version and parse it into pieces\n match = self._regex.search(version)\n if not match:\n raise InvalidVersion(f"Invalid version: '{version}'")\n\n # Store the parsed out pieces of the version\n self._version = _Version(\n epoch=int(match.group("epoch")) if match.group("epoch") else 0,\n release=tuple(int(i) for i in match.group("release").split(".")),\n pre=_parse_letter_version(\n match.group("pre_l"),\n match.group("pre_n"),\n ),\n post=_parse_letter_version(\n match.group("post_l"),\n match.group("post_n1") or match.group("post_n2"),\n ),\n dev=_parse_letter_version(\n match.group("dev_l"),\n match.group("dev_n"),\n ),\n local=_parse_local_version(match.group("local")),\n )\n\n # Generate a key which will be used for sorting\n self._key = _cmpkey(\n self._version.epoch,\n self._version.release,\n self._version.pre,\n self._version.post,\n self._version.dev,\n self._version.local,\n )\n\n def __repr__(self):\n return f"<Version({str(self)!r})>"\n\n def __str__(self):\n parts = []\n\n # Epoch\n if self._version.epoch != 0:\n parts.append(f"{self._version.epoch}!")\n\n # Release segment\n parts.append(".".join(str(x) for x in self._version.release))\n\n # Pre-release\n if self._version.pre is not None:\n parts.append("".join(str(x) for x in self._version.pre))\n\n # Post-release\n if self._version.post is not None:\n parts.append(f".post{self._version.post[1]}")\n\n # Development release\n if self._version.dev is not None:\n parts.append(f".dev{self._version.dev[1]}")\n\n # Local version segment\n if self._version.local is not None:\n parts.append(\n f"+{'.'.join(str(x) for x in self._version.local)}"\n )\n\n return "".join(parts)\n\n @property\n def public(self):\n return str(self).split("+", 1)[0]\n\n @property\n def base_version(self):\n parts = []\n\n # Epoch\n 
if self._version.epoch != 0:\n parts.append(f"{self._version.epoch}!")\n\n # Release segment\n parts.append(".".join(str(x) for x in self._version.release))\n\n return "".join(parts)\n\n @property\n def local(self):\n version_string = str(self)\n if "+" in version_string:\n return version_string.split("+", 1)[1]\n\n @property\n def is_prerelease(self):\n return bool(self._version.dev or self._version.pre)\n\n @property\n def is_postrelease(self):\n return bool(self._version.post)\n\n\ndef _parse_letter_version(letter, number):\n if letter:\n # We assume there is an implicit 0 in a pre-release if there is\n # no numeral associated with it.\n if number is None:\n number = 0\n\n # We normalize any letters to their lower-case form\n letter = letter.lower()\n\n # We consider some words to be alternate spellings of other words and\n # in those cases we want to normalize the spellings to our preferred\n # spelling.\n if letter == "alpha":\n letter = "a"\n elif letter == "beta":\n letter = "b"\n elif letter in ["c", "pre", "preview"]:\n letter = "rc"\n elif letter in ["rev", "r"]:\n letter = "post"\n\n return letter, int(number)\n if not letter and number:\n # We assume that if we are given a number but not given a letter,\n # then this is using the implicit post release syntax (e.g., 1.0-1)\n letter = "post"\n\n return letter, int(number)\n\n\n_local_version_seperators = re.compile(r"[\._-]")\n\n\ndef _parse_local_version(local):\n """\n Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").\n """\n if local is not None:\n return tuple(\n part.lower() if not part.isdigit() else int(part)\n for part in _local_version_seperators.split(local)\n )\n\n\ndef _cmpkey(epoch, release, pre, post, dev, local):\n # When we compare a release version, we want to compare it with all of the\n # trailing zeros removed. 
So we'll use a reverse the list, drop all the now\n # leading zeros until we come to something non-zero, then take the rest,\n # re-reverse it back into the correct order, and make it a tuple and use\n # that for our sorting key.\n release = tuple(\n reversed(list(\n itertools.dropwhile(\n lambda x: x == 0,\n reversed(release),\n )\n ))\n )\n\n # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.\n # We'll do this by abusing the pre-segment, but we _only_ want to do this\n # if there is no pre- or a post-segment. If we have one of those, then\n # the normal sorting rules will handle this case correctly.\n if pre is None and post is None and dev is not None:\n pre = -Infinity\n # Versions without a pre-release (except as noted above) should sort after\n # those with one.\n elif pre is None:\n pre = Infinity\n\n # Versions without a post-segment should sort before those with one.\n if post is None:\n post = -Infinity\n\n # Versions without a development segment should sort after those with one.\n if dev is None:\n dev = Infinity\n\n if local is None:\n # Versions without a local segment should sort before those with one.\n local = -Infinity\n else:\n # Versions with a local segment need that segment parsed to implement\n # the sorting rules in PEP440.\n # - Alphanumeric segments sort before numeric segments\n # - Alphanumeric segments sort lexicographically\n # - Numeric segments sort numerically\n # - Shorter versions sort before longer versions when the prefixes\n # match exactly\n local = tuple(\n (i, "") if isinstance(i, int) else (-Infinity, i)\n for i in local\n )\n\n return epoch, release, pre, post, dev, local\n
.venv\Lib\site-packages\numpy\_utils\_pep440.py
_pep440.py
Python
14,474
0.95
0.207819
0.201087
awesome-app
447
2023-09-08T06:49:50.510719
Apache-2.0
false
f40609cdfb73028993dc4b01d0a00f67
import re\nfrom collections.abc import Callable\nfrom typing import (\n Any,\n ClassVar,\n Final,\n Generic,\n NamedTuple,\n TypeVar,\n final,\n type_check_only,\n)\nfrom typing import (\n Literal as L,\n)\n\nfrom typing_extensions import TypeIs\n\n__all__ = ["VERSION_PATTERN", "InvalidVersion", "LegacyVersion", "Version", "parse"]\n\n###\n\n_CmpKeyT = TypeVar("_CmpKeyT", bound=tuple[object, ...])\n_CmpKeyT_co = TypeVar("_CmpKeyT_co", bound=tuple[object, ...], default=tuple[Any, ...], covariant=True)\n\n###\n\nVERSION_PATTERN: Final[str] = ...\n\nclass InvalidVersion(ValueError): ...\n\n@type_check_only\n@final\nclass _InfinityType:\n def __hash__(self) -> int: ...\n def __eq__(self, other: object, /) -> TypeIs[_InfinityType]: ...\n def __ne__(self, other: object, /) -> bool: ...\n def __lt__(self, other: object, /) -> L[False]: ...\n def __le__(self, other: object, /) -> L[False]: ...\n def __gt__(self, other: object, /) -> L[True]: ...\n def __ge__(self, other: object, /) -> L[True]: ...\n def __neg__(self) -> _NegativeInfinityType: ...\n\nInfinity: Final[_InfinityType] = ...\n\n@type_check_only\n@final\nclass _NegativeInfinityType:\n def __hash__(self) -> int: ...\n def __eq__(self, other: object, /) -> TypeIs[_NegativeInfinityType]: ...\n def __ne__(self, other: object, /) -> bool: ...\n def __lt__(self, other: object, /) -> L[True]: ...\n def __le__(self, other: object, /) -> L[True]: ...\n def __gt__(self, other: object, /) -> L[False]: ...\n def __ge__(self, other: object, /) -> L[False]: ...\n def __neg__(self) -> _InfinityType: ...\n\nNegativeInfinity: Final[_NegativeInfinityType] = ...\n\nclass _Version(NamedTuple):\n epoch: int\n release: tuple[int, ...]\n dev: tuple[str, int] | None\n pre: tuple[str, int] | None\n post: tuple[str, int] | None\n local: tuple[str | int, ...] | None\n\nclass _BaseVersion(Generic[_CmpKeyT_co]):\n _key: _CmpKeyT_co\n def __hash__(self) -> int: ...\n def __eq__(self, other: _BaseVersion, /) -> bool: ... 
# type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]\n def __ne__(self, other: _BaseVersion, /) -> bool: ... # type: ignore[override] # pyright: ignore[reportIncompatibleMethodOverride]\n def __lt__(self, other: _BaseVersion, /) -> bool: ...\n def __le__(self, other: _BaseVersion, /) -> bool: ...\n def __ge__(self, other: _BaseVersion, /) -> bool: ...\n def __gt__(self, other: _BaseVersion, /) -> bool: ...\n def _compare(self, /, other: _BaseVersion[_CmpKeyT], method: Callable[[_CmpKeyT_co, _CmpKeyT], bool]) -> bool: ...\n\nclass LegacyVersion(_BaseVersion[tuple[L[-1], tuple[str, ...]]]):\n _version: Final[str]\n def __init__(self, /, version: str) -> None: ...\n @property\n def public(self) -> str: ...\n @property\n def base_version(self) -> str: ...\n @property\n def local(self) -> None: ...\n @property\n def is_prerelease(self) -> L[False]: ...\n @property\n def is_postrelease(self) -> L[False]: ...\n\nclass Version(\n _BaseVersion[\n tuple[\n int, # epoch\n tuple[int, ...], # release\n tuple[str, int] | _InfinityType | _NegativeInfinityType, # pre\n tuple[str, int] | _NegativeInfinityType, # post\n tuple[str, int] | _InfinityType, # dev\n tuple[tuple[int, L[""]] | tuple[_NegativeInfinityType, str], ...] | _NegativeInfinityType, # local\n ],\n ],\n):\n _regex: ClassVar[re.Pattern[str]] = ...\n _version: Final[str]\n\n def __init__(self, /, version: str) -> None: ...\n @property\n def public(self) -> str: ...\n @property\n def base_version(self) -> str: ...\n @property\n def local(self) -> str | None: ...\n @property\n def is_prerelease(self) -> bool: ...\n @property\n def is_postrelease(self) -> bool: ...\n\n#\ndef parse(version: str) -> Version | LegacyVersion: ...\n
.venv\Lib\site-packages\numpy\_utils\_pep440.pyi
_pep440.pyi
Other
3,991
0.95
0.363636
0.028846
awesome-app
73
2024-06-26T06:00:18.974023
MIT
false
c52165cd0a30e31b30a6fb796b4d2210
"""\nThis is a module for defining private helpers which do not depend on the\nrest of NumPy.\n\nEverything in here must be self-contained so that it can be\nimported anywhere else without creating circular imports.\nIf a utility requires the import of NumPy, it probably belongs\nin ``numpy._core``.\n"""\n\nimport functools\nimport warnings\n\nfrom ._convertions import asbytes, asunicode\n\n\ndef set_module(module):\n """Private decorator for overriding __module__ on a function or class.\n\n Example usage::\n\n @set_module('numpy')\n def example():\n pass\n\n assert example.__module__ == 'numpy'\n """\n def decorator(func):\n if module is not None:\n if isinstance(func, type):\n try:\n func._module_source = func.__module__\n except (AttributeError):\n pass\n\n func.__module__ = module\n return func\n return decorator\n\n\ndef _rename_parameter(old_names, new_names, dep_version=None):\n """\n Generate decorator for backward-compatible keyword renaming.\n\n Apply the decorator generated by `_rename_parameter` to functions with a\n renamed parameter to maintain backward-compatibility.\n\n After decoration, the function behaves as follows:\n If only the new parameter is passed into the function, behave as usual.\n If only the old parameter is passed into the function (as a keyword), raise\n a DeprecationWarning if `dep_version` is provided, and behave as usual\n otherwise.\n If both old and new parameters are passed into the function, raise a\n DeprecationWarning if `dep_version` is provided, and raise the appropriate\n TypeError (function got multiple values for argument).\n\n Parameters\n ----------\n old_names : list of str\n Old names of parameters\n new_name : list of str\n New names of parameters\n dep_version : str, optional\n Version of NumPy in which old parameter was deprecated in the format\n 'X.Y.Z'. 
If supplied, the deprecation message will indicate that\n support for the old parameter will be removed in version 'X.Y+2.Z'\n\n Notes\n -----\n Untested with functions that accept *args. Probably won't work as written.\n\n """\n def decorator(fun):\n @functools.wraps(fun)\n def wrapper(*args, **kwargs):\n __tracebackhide__ = True # Hide traceback for py.test\n for old_name, new_name in zip(old_names, new_names):\n if old_name in kwargs:\n if dep_version:\n end_version = dep_version.split('.')\n end_version[1] = str(int(end_version[1]) + 2)\n end_version = '.'.join(end_version)\n msg = (f"Use of keyword argument `{old_name}` is "\n f"deprecated and replaced by `{new_name}`. "\n f"Support for `{old_name}` will be removed "\n f"in NumPy {end_version}.")\n warnings.warn(msg, DeprecationWarning, stacklevel=2)\n if new_name in kwargs:\n msg = (f"{fun.__name__}() got multiple values for "\n f"argument now known as `{new_name}`")\n raise TypeError(msg)\n kwargs[new_name] = kwargs.pop(old_name)\n return fun(*args, **kwargs)\n return wrapper\n return decorator\n
.venv\Lib\site-packages\numpy\_utils\__init__.py
__init__.py
Python
3,572
0.95
0.315789
0
vue-tools
113
2024-10-09T06:41:05.788322
MIT
false
3d599c815f1242ff47ec698bbb735203
from collections.abc import Callable, Iterable\nfrom typing import Protocol, TypeVar, overload, type_check_only\n\nfrom _typeshed import IdentityFunction\n\nfrom ._convertions import asbytes as asbytes\nfrom ._convertions import asunicode as asunicode\n\n###\n\n_T = TypeVar("_T")\n_HasModuleT = TypeVar("_HasModuleT", bound=_HasModule)\n\n@type_check_only\nclass _HasModule(Protocol):\n __module__: str\n\n###\n\n@overload\ndef set_module(module: None) -> IdentityFunction: ...\n@overload\ndef set_module(module: str) -> Callable[[_HasModuleT], _HasModuleT]: ...\n\n#\ndef _rename_parameter(\n old_names: Iterable[str],\n new_names: Iterable[str],\n dep_version: str | None = None,\n) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ...\n
.venv\Lib\site-packages\numpy\_utils\__init__.pyi
__init__.pyi
Other
756
0.95
0.133333
0.136364
node-utils
875
2024-05-29T12:15:46.559407
Apache-2.0
false
a3bc8d78a7155e4d3a1e3ce109cf9583
\n\n
.venv\Lib\site-packages\numpy\_utils\__pycache__\_convertions.cpython-313.pyc
_convertions.cpython-313.pyc
Other
860
0.8
0
0
node-utils
79
2024-07-01T21:16:06.112809
MIT
false
100c204fa2ef340afd24290ce0f47530
\n\n
.venv\Lib\site-packages\numpy\_utils\__pycache__\_inspect.cpython-313.pyc
_inspect.cpython-313.pyc
Other
9,267
0.95
0.102041
0
node-utils
518
2025-02-28T20:20:25.117029
Apache-2.0
false
8ac9b8fcaba1ffdafb335a230ca17357
\n\n
.venv\Lib\site-packages\numpy\_utils\__pycache__\_pep440.cpython-313.pyc
_pep440.cpython-313.pyc
Other
19,100
0.8
0.017964
0.00625
awesome-app
396
2024-10-03T10:11:48.336081
GPL-3.0
false
d8674044c82afa8cab11dd31c1bd8b18
\n\n
.venv\Lib\site-packages\numpy\_utils\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
4,086
0.95
0.269841
0
awesome-app
79
2023-10-05T22:44:41.904086
GPL-3.0
false
08f66b7f07af2c115365d0f9213b10c4
\n\n
.venv\Lib\site-packages\numpy\__pycache__\conftest.cpython-313.pyc
conftest.cpython-313.pyc
Other
10,467
0.95
0.052632
0
awesome-app
226
2023-09-08T19:56:49.813691
Apache-2.0
true
6cb51b6396b7291bc955e152e5220431
\n\n
.venv\Lib\site-packages\numpy\__pycache__\dtypes.cpython-313.pyc
dtypes.cpython-313.pyc
Other
1,540
0.85
0
0
awesome-app
38
2024-04-20T17:06:47.878365
BSD-3-Clause
false
6f9579ed332abe07d91a356339946e1a
\n\n
.venv\Lib\site-packages\numpy\__pycache__\exceptions.cpython-313.pyc
exceptions.cpython-313.pyc
Other
8,466
0.95
0.108247
0
python-kit
269
2023-12-02T11:16:03.636471
BSD-3-Clause
false
509f00ed6aa97e59abcb898ec65ce2f7
\n\n
.venv\Lib\site-packages\numpy\__pycache__\matlib.cpython-313.pyc
matlib.cpython-313.pyc
Other
10,912
0.95
0.026866
0
node-utils
881
2023-10-12T03:07:15.819876
Apache-2.0
false
e0cb1c8eba6311a52806b3a0433b6ede
\n\n
.venv\Lib\site-packages\numpy\__pycache__\version.cpython-313.pyc
version.cpython-313.pyc
Other
561
0.8
0.083333
0
awesome-app
600
2025-05-11T04:20:24.590666
BSD-3-Clause
false
b648e008dc814908e69c87828cc9eff4
\n\n
.venv\Lib\site-packages\numpy\__pycache__\_array_api_info.cpython-313.pyc
_array_api_info.cpython-313.pyc
Other
9,970
0.95
0.065385
0
awesome-app
758
2024-08-21T16:09:33.951742
Apache-2.0
false
82a5230c078eb767941a033977d256c9
\n\n
.venv\Lib\site-packages\numpy\__pycache__\_configtool.cpython-313.pyc
_configtool.cpython-313.pyc
Other
1,687
0.8
0.047619
0.05
vue-tools
983
2024-09-17T21:17:51.752037
BSD-3-Clause
false
acb3f9f8cbfa6545895e34891b46230f
\n\n
.venv\Lib\site-packages\numpy\__pycache__\_distributor_init.cpython-313.pyc
_distributor_init.cpython-313.pyc
Other
659
0.7
0
0
vue-tools
783
2024-06-09T19:52:04.520825
Apache-2.0
false
4df32ed0dc8f7b378f42e4840ea17723
\n\n
.venv\Lib\site-packages\numpy\__pycache__\_expired_attrs_2_0.cpython-313.pyc
_expired_attrs_2_0.cpython-313.pyc
Other
4,294
0.95
0.129032
0
react-lib
933
2025-05-28T13:27:36.967590
BSD-3-Clause
false
faa32e28bc90e1ace084d404f859b65f
\n\n
.venv\Lib\site-packages\numpy\__pycache__\_globals.cpython-313.pyc
_globals.cpython-313.pyc
Other
3,904
0.95
0.220588
0.017544
react-lib
213
2025-05-26T08:44:37.536066
GPL-3.0
false
9888308e317672c61f208a8ad2dd1df0
\n\n
.venv\Lib\site-packages\numpy\__pycache__\_pytesttester.cpython-313.pyc
_pytesttester.cpython-313.pyc
Other
6,464
0.95
0.082192
0.016
awesome-app
976
2025-04-21T15:14:08.876279
GPL-3.0
true
66c53f5f79ab95743a4712c0e0f9f741
\n\n
.venv\Lib\site-packages\numpy\__pycache__\__config__.cpython-313.pyc
__config__.cpython-313.pyc
Other
5,310
0.8
0.028986
0
node-utils
366
2024-01-24T16:40:32.442539
Apache-2.0
false
bd6d7e265cb7abdc5ff31287173067d0
\n\n
.venv\Lib\site-packages\numpy\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
28,080
0.95
0.066667
0
awesome-app
793
2023-08-22T03:42:53.988780
Apache-2.0
false
9ac7d21377b1124d706870d2080f2e71
Version: 1.10.1\nArguments: ['C:\\Users\\runneradmin\\AppData\\Local\\Temp\\cibw-run-9tmma_o0\\cp313-win_amd64\\build\\venv\\Scripts\\delvewheel', 'repair', '--add-path', 'D:/a/numpy/numpy/.openblas/lib', '-w', 'C:\\Users\\runneradmin\\AppData\\Local\\Temp\\cibw-run-9tmma_o0\\cp313-win_amd64\\repaired_wheel', 'C:\\Users\\runneradmin\\AppData\\Local\\Temp\\cibw-run-9tmma_o0\\cp313-win_amd64\\built_wheel\\numpy-2.3.1-cp313-cp313-win_amd64.whl']\n
.venv\Lib\site-packages\numpy-2.3.1.dist-info\DELVEWHEEL
DELVEWHEEL
Other
446
0.7
0
0
node-utils
17
2023-11-16T06:12:51.931308
MIT
false
9f29c052236b8993d2eadac92748b7a2
[pkg_config]\nnumpy = numpy._core.lib.pkgconfig\n\n[array_api]\nnumpy = numpy\n\n[pyinstaller40]\nhook-dirs = numpy:_pyinstaller_hooks_dir\n\n[console_scripts]\nf2py = numpy.f2py.f2py2e:main\nnumpy-config = numpy._configtool:main\n\n
.venv\Lib\site-packages\numpy-2.3.1.dist-info\entry_points.txt
entry_points.txt
Other
220
0.7
0
0
vue-tools
513
2024-03-15T00:04:58.253809
GPL-3.0
false
6f858322c9bc0659f250a01c58792b67
pip\n
.venv\Lib\site-packages\numpy-2.3.1.dist-info\INSTALLER
INSTALLER
Other
4
0.5
0
0
python-kit
592
2023-08-17T18:08:03.525521
BSD-3-Clause
false
365c9bfeb7d89244f2ce01c1de44cb85
Copyright (c) 2005-2025, NumPy Developers.\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n * Redistributions in binary form must reproduce the above\n copyright notice, this list of conditions and the following\n disclaimer in the documentation and/or other materials provided\n with the distribution.\n\n * Neither the name of the NumPy Developers nor the names of any\n contributors may be used to endorse or promote products derived\n from this software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n----\n\nThe NumPy repository and source distributions bundle several libraries that are\ncompatibly licensed. 
We list these here.\n\nName: lapack-lite\nFiles: numpy/linalg/lapack_lite/*\nLicense: BSD-3-Clause\n For details, see numpy/linalg/lapack_lite/LICENSE.txt\n\nName: dragon4\nFiles: numpy/_core/src/multiarray/dragon4.c\nLicense: MIT\n For license text, see numpy/_core/src/multiarray/dragon4.c\n\nName: libdivide\nFiles: numpy/_core/include/numpy/libdivide/*\nLicense: Zlib\n For license text, see numpy/_core/include/numpy/libdivide/LICENSE.txt\n\n\nNote that the following files are vendored in the repository and sdist but not\ninstalled in built numpy packages:\n\nName: Meson\nFiles: vendored-meson/meson/*\nLicense: Apache 2.0\n For license text, see vendored-meson/meson/COPYING\n\nName: spin\nFiles: .spin/cmds.py\nLicense: BSD-3\n For license text, see .spin/LICENSE\n\nName: tempita\nFiles: numpy/_build_utils/tempita/*\nLicense: MIT\n For details, see numpy/_build_utils/tempita/LICENCE.txt\n\n----\n\nThis binary distribution of NumPy also bundles the following software:\n\n\nName: OpenBLAS\nFiles: numpy.libs\libscipy_openblas*.dll\nDescription: bundled as a dynamically linked library\nAvailability: https://github.com/OpenMathLib/OpenBLAS/\nLicense: BSD-3-Clause\n Copyright (c) 2011-2014, The OpenBLAS Project\n All rights reserved.\n\n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions are\n met:\n\n 1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n 2. Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in\n the documentation and/or other materials provided with the\n distribution.\n 3. 
Neither the name of the OpenBLAS project nor the names of\n its contributors may be used to endorse or promote products\n derived from this software without specific prior written\n permission.\n\n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\n AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE\n LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE\n USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\nName: LAPACK\nFiles: numpy.libs\libscipy_openblas*.dll\nDescription: bundled in OpenBLAS\nAvailability: https://github.com/OpenMathLib/OpenBLAS/\nLicense: BSD-3-Clause-Open-MPI\n Copyright (c) 1992-2013 The University of Tennessee and The University\n of Tennessee Research Foundation. All rights\n reserved.\n Copyright (c) 2000-2013 The University of California Berkeley. All\n rights reserved.\n Copyright (c) 2006-2013 The University of Colorado Denver. 
All rights\n reserved.\n\n $COPYRIGHT$\n\n Additional copyrights may follow\n\n $HEADER$\n\n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions are\n met:\n\n - Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n - Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer listed\n in this license in the documentation and/or other materials\n provided with the distribution.\n\n - Neither the name of the copyright holders nor the names of its\n contributors may be used to endorse or promote products derived from\n this software without specific prior written permission.\n\n The copyright holders provide no reassurances that the source code\n provided does not infringe any patent, copyright, or any other\n intellectual property rights of third parties. The copyright holders\n disclaim any liability to any recipient for claims brought against\n recipient by any third party for infringement of that parties\n intellectual property rights.\n\n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\nName: GCC runtime library\nFiles: numpy.libs\libscipy_openblas*.dll\nDescription: statically linked to files compiled with gcc\nAvailability: https://gcc.gnu.org/git/?p=gcc.git;a=tree;f=libgfortran\nLicense: GPL-3.0-or-later WITH GCC-exception-3.1\n Copyright (C) 2002-2017 Free Software Foundation, Inc.\n\n Libgfortran is free software; you can redistribute it and/or modify\n it under the terms of the GNU General Public License as published by\n the Free Software Foundation; either version 3, or (at your option)\n any later version.\n\n Libgfortran is distributed in the hope that it will be useful,\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n GNU General Public License for more details.\n\n Under Section 7 of GPL version 3, you are granted additional\n permissions described in the GCC Runtime Library Exception, version\n 3.1, as published by the Free Software Foundation.\n\n You should have received a copy of the GNU General Public License and\n a copy of the GCC Runtime Library Exception along with this program;\n see the files COPYING3 and COPYING.RUNTIME respectively. 
If not, see\n <http://www.gnu.org/licenses/>.\n\n----\n\nFull text of license texts referred to above follows (that they are\nlisted below does not necessarily imply the conditions apply to the\npresent binary release):\n\n----\n\nGCC RUNTIME LIBRARY EXCEPTION\n\nVersion 3.1, 31 March 2009\n\nCopyright (C) 2009 Free Software Foundation, Inc. <https://fsf.org/>\n\nEveryone is permitted to copy and distribute verbatim copies of this\nlicense document, but changing it is not allowed.\n\nThis GCC Runtime Library Exception ("Exception") is an additional\npermission under section 7 of the GNU General Public License, version\n3 ("GPLv3"). It applies to a given file (the "Runtime Library") that\nbears a notice placed by the copyright holder of the file stating that\nthe file is governed by GPLv3 along with this Exception.\n\nWhen you use GCC to compile a program, GCC may combine portions of\ncertain GCC header files and runtime libraries with the compiled\nprogram. The purpose of this Exception is to allow compilation of\nnon-GPL (including proprietary) programs to use, in this way, the\nheader files and runtime libraries covered by this Exception.\n\n0. 
Definitions.\n\nA file is an "Independent Module" if it either requires the Runtime\nLibrary for execution after a Compilation Process, or makes use of an\ninterface provided by the Runtime Library, but is not otherwise based\non the Runtime Library.\n\n"GCC" means a version of the GNU Compiler Collection, with or without\nmodifications, governed by version 3 (or a specified later version) of\nthe GNU General Public License (GPL) with the option of using any\nsubsequent versions published by the FSF.\n\n"GPL-compatible Software" is software whose conditions of propagation,\nmodification and use would permit combination with GCC in accord with\nthe license of GCC.\n\n"Target Code" refers to output from any compiler for a real or virtual\ntarget processor architecture, in executable form or suitable for\ninput to an assembler, loader, linker and/or execution\nphase. Notwithstanding that, Target Code does not include data in any\nformat that is used as a compiler intermediate representation, or used\nfor producing a compiler intermediate representation.\n\nThe "Compilation Process" transforms code entirely represented in\nnon-intermediate languages designed for human-written code, and/or in\nJava Virtual Machine byte code, into Target Code. Thus, for example,\nuse of source code generators and preprocessors need not be considered\npart of the Compilation Process, since the Compilation Process can be\nunderstood as starting with the output of the generators or\npreprocessors.\n\nA Compilation Process is "Eligible" if it is done using GCC, alone or\nwith other GPL-compatible software, or if it is done without using any\nwork based on GCC. For example, using non-GPL-compatible Software to\noptimize any GCC intermediate representations would not qualify as an\nEligible Compilation Process.\n\n1. 
Grant of Additional Permission.\n\nYou have permission to propagate a work of Target Code formed by\ncombining the Runtime Library with Independent Modules, even if such\npropagation would otherwise violate the terms of GPLv3, provided that\nall Target Code was generated by Eligible Compilation Processes. You\nmay then convey such a combination under terms of your choice,\nconsistent with the licensing of the Independent Modules.\n\n2. No Weakening of GCC Copyleft.\n\nThe availability of this Exception does not imply any general\npresumption that third-party software is unaffected by the copyleft\nrequirements of the license of GCC.\n\n----\n\n GNU GENERAL PUBLIC LICENSE\n Version 3, 29 June 2007\n\n Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n\n Preamble\n\n The GNU General Public License is a free, copyleft license for\nsoftware and other kinds of works.\n\n The licenses for most software and other practical works are designed\nto take away your freedom to share and change the works. By contrast,\nthe GNU General Public License is intended to guarantee your freedom to\nshare and change all versions of a program--to make sure it remains free\nsoftware for all its users. We, the Free Software Foundation, use the\nGNU General Public License for most of our software; it applies also to\nany other work released this way by its authors. You can apply it to\nyour programs, too.\n\n When we speak of free software, we are referring to freedom, not\nprice. 
Our General Public Licenses are designed to make sure that you\nhave the freedom to distribute copies of free software (and charge for\nthem if you wish), that you receive source code or can get it if you\nwant it, that you can change the software or use pieces of it in new\nfree programs, and that you know you can do these things.\n\n To protect your rights, we need to prevent others from denying you\nthese rights or asking you to surrender the rights. Therefore, you have\ncertain responsibilities if you distribute copies of the software, or if\nyou modify it: responsibilities to respect the freedom of others.\n\n For example, if you distribute copies of such a program, whether\ngratis or for a fee, you must pass on to the recipients the same\nfreedoms that you received. You must make sure that they, too, receive\nor can get the source code. And you must show them these terms so they\nknow their rights.\n\n Developers that use the GNU GPL protect your rights with two steps:\n(1) assert copyright on the software, and (2) offer you this License\ngiving you legal permission to copy, distribute and/or modify it.\n\n For the developers' and authors' protection, the GPL clearly explains\nthat there is no warranty for this free software. For both users' and\nauthors' sake, the GPL requires that modified versions be marked as\nchanged, so that their problems will not be attributed erroneously to\nauthors of previous versions.\n\n Some devices are designed to deny users access to install or run\nmodified versions of the software inside them, although the manufacturer\ncan do so. This is fundamentally incompatible with the aim of\nprotecting users' freedom to change the software. The systematic\npattern of such abuse occurs in the area of products for individuals to\nuse, which is precisely where it is most unacceptable. Therefore, we\nhave designed this version of the GPL to prohibit the practice for those\nproducts. 
If such problems arise substantially in other domains, we\nstand ready to extend this provision to those domains in future versions\nof the GPL, as needed to protect the freedom of users.\n\n Finally, every program is threatened constantly by software patents.\nStates should not allow patents to restrict development and use of\nsoftware on general-purpose computers, but in those that do, we wish to\navoid the special danger that patents applied to a free program could\nmake it effectively proprietary. To prevent this, the GPL assures that\npatents cannot be used to render the program non-free.\n\n The precise terms and conditions for copying, distribution and\nmodification follow.\n\n TERMS AND CONDITIONS\n\n 0. Definitions.\n\n "This License" refers to version 3 of the GNU General Public License.\n\n "Copyright" also means copyright-like laws that apply to other kinds of\nworks, such as semiconductor masks.\n\n "The Program" refers to any copyrightable work licensed under this\nLicense. Each licensee is addressed as "you". "Licensees" and\n"recipients" may be individuals or organizations.\n\n To "modify" a work means to copy from or adapt all or part of the work\nin a fashion requiring copyright permission, other than the making of an\nexact copy. The resulting work is called a "modified version" of the\nearlier work or a work "based on" the earlier work.\n\n A "covered work" means either the unmodified Program or a work based\non the Program.\n\n To "propagate" a work means to do anything with it that, without\npermission, would make you directly or secondarily liable for\ninfringement under applicable copyright law, except executing it on a\ncomputer or modifying a private copy. Propagation includes copying,\ndistribution (with or without modification), making available to the\npublic, and in some countries other activities as well.\n\n To "convey" a work means any kind of propagation that enables other\nparties to make or receive copies. 
Mere interaction with a user through\na computer network, with no transfer of a copy, is not conveying.\n\n An interactive user interface displays "Appropriate Legal Notices"\nto the extent that it includes a convenient and prominently visible\nfeature that (1) displays an appropriate copyright notice, and (2)\ntells the user that there is no warranty for the work (except to the\nextent that warranties are provided), that licensees may convey the\nwork under this License, and how to view a copy of this License. If\nthe interface presents a list of user commands or options, such as a\nmenu, a prominent item in the list meets this criterion.\n\n 1. Source Code.\n\n The "source code" for a work means the preferred form of the work\nfor making modifications to it. "Object code" means any non-source\nform of a work.\n\n A "Standard Interface" means an interface that either is an official\nstandard defined by a recognized standards body, or, in the case of\ninterfaces specified for a particular programming language, one that\nis widely used among developers working in that language.\n\n The "System Libraries" of an executable work include anything, other\nthan the work as a whole, that (a) is included in the normal form of\npackaging a Major Component, but which is not part of that Major\nComponent, and (b) serves only to enable use of the work with that\nMajor Component, or to implement a Standard Interface for which an\nimplementation is available to the public in source code form. 
A\n"Major Component", in this context, means a major essential component\n(kernel, window system, and so on) of the specific operating system\n(if any) on which the executable work runs, or a compiler used to\nproduce the work, or an object code interpreter used to run it.\n\n The "Corresponding Source" for a work in object code form means all\nthe source code needed to generate, install, and (for an executable\nwork) run the object code and to modify the work, including scripts to\ncontrol those activities. However, it does not include the work's\nSystem Libraries, or general-purpose tools or generally available free\nprograms which are used unmodified in performing those activities but\nwhich are not part of the work. For example, Corresponding Source\nincludes interface definition files associated with source files for\nthe work, and the source code for shared libraries and dynamically\nlinked subprograms that the work is specifically designed to require,\nsuch as by intimate data communication or control flow between those\nsubprograms and other parts of the work.\n\n The Corresponding Source need not include anything that users\ncan regenerate automatically from other parts of the Corresponding\nSource.\n\n The Corresponding Source for a work in source code form is that\nsame work.\n\n 2. Basic Permissions.\n\n All rights granted under this License are granted for the term of\ncopyright on the Program, and are irrevocable provided the stated\nconditions are met. This License explicitly affirms your unlimited\npermission to run the unmodified Program. The output from running a\ncovered work is covered by this License only if the output, given its\ncontent, constitutes a covered work. This License acknowledges your\nrights of fair use or other equivalent, as provided by copyright law.\n\n You may make, run and propagate covered works that you do not\nconvey, without conditions so long as your license otherwise remains\nin force. 
You may convey covered works to others for the sole purpose\nof having them make modifications exclusively for you, or provide you\nwith facilities for running those works, provided that you comply with\nthe terms of this License in conveying all material for which you do\nnot control copyright. Those thus making or running the covered works\nfor you must do so exclusively on your behalf, under your direction\nand control, on terms that prohibit them from making any copies of\nyour copyrighted material outside their relationship with you.\n\n Conveying under any other circumstances is permitted solely under\nthe conditions stated below. Sublicensing is not allowed; section 10\nmakes it unnecessary.\n\n 3. Protecting Users' Legal Rights From Anti-Circumvention Law.\n\n No covered work shall be deemed part of an effective technological\nmeasure under any applicable law fulfilling obligations under article\n11 of the WIPO copyright treaty adopted on 20 December 1996, or\nsimilar laws prohibiting or restricting circumvention of such\nmeasures.\n\n When you convey a covered work, you waive any legal power to forbid\ncircumvention of technological measures to the extent such circumvention\nis effected by exercising rights under this License with respect to\nthe covered work, and you disclaim any intention to limit operation or\nmodification of the work as a means of enforcing, against the work's\nusers, your or third parties' legal rights to forbid circumvention of\ntechnological measures.\n\n 4. 
Conveying Verbatim Copies.\n\n You may convey verbatim copies of the Program's source code as you\nreceive it, in any medium, provided that you conspicuously and\nappropriately publish on each copy an appropriate copyright notice;\nkeep intact all notices stating that this License and any\nnon-permissive terms added in accord with section 7 apply to the code;\nkeep intact all notices of the absence of any warranty; and give all\nrecipients a copy of this License along with the Program.\n\n You may charge any price or no price for each copy that you convey,\nand you may offer support or warranty protection for a fee.\n\n 5. Conveying Modified Source Versions.\n\n You may convey a work based on the Program, or the modifications to\nproduce it from the Program, in the form of source code under the\nterms of section 4, provided that you also meet all of these conditions:\n\n a) The work must carry prominent notices stating that you modified\n it, and giving a relevant date.\n\n b) The work must carry prominent notices stating that it is\n released under this License and any conditions added under section\n 7. This requirement modifies the requirement in section 4 to\n "keep intact all notices".\n\n c) You must license the entire work, as a whole, under this\n License to anyone who comes into possession of a copy. This\n License will therefore apply, along with any applicable section 7\n additional terms, to the whole of the work, and all its parts,\n regardless of how they are packaged. 
This License gives no\n permission to license the work in any other way, but it does not\n invalidate such permission if you have separately received it.\n\n d) If the work has interactive user interfaces, each must display\n Appropriate Legal Notices; however, if the Program has interactive\n interfaces that do not display Appropriate Legal Notices, your\n work need not make them do so.\n\n A compilation of a covered work with other separate and independent\nworks, which are not by their nature extensions of the covered work,\nand which are not combined with it such as to form a larger program,\nin or on a volume of a storage or distribution medium, is called an\n"aggregate" if the compilation and its resulting copyright are not\nused to limit the access or legal rights of the compilation's users\nbeyond what the individual works permit. Inclusion of a covered work\nin an aggregate does not cause this License to apply to the other\nparts of the aggregate.\n\n 6. Conveying Non-Source Forms.\n\n You may convey a covered work in object code form under the terms\nof sections 4 and 5, provided that you also convey the\nmachine-readable Corresponding Source under the terms of this License,\nin one of these ways:\n\n a) Convey the object code in, or embodied in, a physical product\n (including a physical distribution medium), accompanied by the\n Corresponding Source fixed on a durable physical medium\n customarily used for software interchange.\n\n b) Convey the object code in, or embodied in, a physical product\n (including a physical distribution medium), accompanied by a\n written offer, valid for at least three years and valid for as\n long as you offer spare parts or customer support for that product\n model, to give anyone who possesses the object code either (1) a\n copy of the Corresponding Source for all the software in the\n product that is covered by this License, on a durable physical\n medium customarily used for software interchange, for a price no\n more 
than your reasonable cost of physically performing this\n conveying of source, or (2) access to copy the\n Corresponding Source from a network server at no charge.\n\n c) Convey individual copies of the object code with a copy of the\n written offer to provide the Corresponding Source. This\n alternative is allowed only occasionally and noncommercially, and\n only if you received the object code with such an offer, in accord\n with subsection 6b.\n\n d) Convey the object code by offering access from a designated\n place (gratis or for a charge), and offer equivalent access to the\n Corresponding Source in the same way through the same place at no\n further charge. You need not require recipients to copy the\n Corresponding Source along with the object code. If the place to\n copy the object code is a network server, the Corresponding Source\n may be on a different server (operated by you or a third party)\n that supports equivalent copying facilities, provided you maintain\n clear directions next to the object code saying where to find the\n Corresponding Source. Regardless of what server hosts the\n Corresponding Source, you remain obligated to ensure that it is\n available for as long as needed to satisfy these requirements.\n\n e) Convey the object code using peer-to-peer transmission, provided\n you inform other peers where the object code and Corresponding\n Source of the work are being offered to the general public at no\n charge under subsection 6d.\n\n A separable portion of the object code, whose source code is excluded\nfrom the Corresponding Source as a System Library, need not be\nincluded in conveying the object code work.\n\n A "User Product" is either (1) a "consumer product", which means any\ntangible personal property which is normally used for personal, family,\nor household purposes, or (2) anything designed or sold for incorporation\ninto a dwelling. 
In determining whether a product is a consumer product,\ndoubtful cases shall be resolved in favor of coverage. For a particular\nproduct received by a particular user, "normally used" refers to a\ntypical or common use of that class of product, regardless of the status\nof the particular user or of the way in which the particular user\nactually uses, or expects or is expected to use, the product. A product\nis a consumer product regardless of whether the product has substantial\ncommercial, industrial or non-consumer uses, unless such uses represent\nthe only significant mode of use of the product.\n\n "Installation Information" for a User Product means any methods,\nprocedures, authorization keys, or other information required to install\nand execute modified versions of a covered work in that User Product from\na modified version of its Corresponding Source. The information must\nsuffice to ensure that the continued functioning of the modified object\ncode is in no case prevented or interfered with solely because\nmodification has been made.\n\n If you convey an object code work under this section in, or with, or\nspecifically for use in, a User Product, and the conveying occurs as\npart of a transaction in which the right of possession and use of the\nUser Product is transferred to the recipient in perpetuity or for a\nfixed term (regardless of how the transaction is characterized), the\nCorresponding Source conveyed under this section must be accompanied\nby the Installation Information. But this requirement does not apply\nif neither you nor any third party retains the ability to install\nmodified object code on the User Product (for example, the work has\nbeen installed in ROM).\n\n The requirement to provide Installation Information does not include a\nrequirement to continue to provide support service, warranty, or updates\nfor a work that has been modified or installed by the recipient, or for\nthe User Product in which it has been modified or installed. 
Access to a\nnetwork may be denied when the modification itself materially and\nadversely affects the operation of the network or violates the rules and\nprotocols for communication across the network.\n\n Corresponding Source conveyed, and Installation Information provided,\nin accord with this section must be in a format that is publicly\ndocumented (and with an implementation available to the public in\nsource code form), and must require no special password or key for\nunpacking, reading or copying.\n\n 7. Additional Terms.\n\n "Additional permissions" are terms that supplement the terms of this\nLicense by making exceptions from one or more of its conditions.\nAdditional permissions that are applicable to the entire Program shall\nbe treated as though they were included in this License, to the extent\nthat they are valid under applicable law. If additional permissions\napply only to part of the Program, that part may be used separately\nunder those permissions, but the entire Program remains governed by\nthis License without regard to the additional permissions.\n\n When you convey a copy of a covered work, you may at your option\nremove any additional permissions from that copy, or from any part of\nit. (Additional permissions may be written to require their own\nremoval in certain cases when you modify the work.) 
You may place\nadditional permissions on material, added by you to a covered work,\nfor which you have or can give appropriate copyright permission.\n\n Notwithstanding any other provision of this License, for material you\nadd to a covered work, you may (if authorized by the copyright holders of\nthat material) supplement the terms of this License with terms:\n\n a) Disclaiming warranty or limiting liability differently from the\n terms of sections 15 and 16 of this License; or\n\n b) Requiring preservation of specified reasonable legal notices or\n author attributions in that material or in the Appropriate Legal\n Notices displayed by works containing it; or\n\n c) Prohibiting misrepresentation of the origin of that material, or\n requiring that modified versions of such material be marked in\n reasonable ways as different from the original version; or\n\n d) Limiting the use for publicity purposes of names of licensors or\n authors of the material; or\n\n e) Declining to grant rights under trademark law for use of some\n trade names, trademarks, or service marks; or\n\n f) Requiring indemnification of licensors and authors of that\n material by anyone who conveys the material (or modified versions of\n it) with contractual assumptions of liability to the recipient, for\n any liability that these contractual assumptions directly impose on\n those licensors and authors.\n\n All other non-permissive additional terms are considered "further\nrestrictions" within the meaning of section 10. If the Program as you\nreceived it, or any part of it, contains a notice stating that it is\ngoverned by this License along with a term that is a further\nrestriction, you may remove that term. 
If a license document contains\na further restriction but permits relicensing or conveying under this\nLicense, you may add to a covered work material governed by the terms\nof that license document, provided that the further restriction does\nnot survive such relicensing or conveying.\n\n If you add terms to a covered work in accord with this section, you\nmust place, in the relevant source files, a statement of the\nadditional terms that apply to those files, or a notice indicating\nwhere to find the applicable terms.\n\n Additional terms, permissive or non-permissive, may be stated in the\nform of a separately written license, or stated as exceptions;\nthe above requirements apply either way.\n\n 8. Termination.\n\n You may not propagate or modify a covered work except as expressly\nprovided under this License. Any attempt otherwise to propagate or\nmodify it is void, and will automatically terminate your rights under\nthis License (including any patent licenses granted under the third\nparagraph of section 11).\n\n However, if you cease all violation of this License, then your\nlicense from a particular copyright holder is reinstated (a)\nprovisionally, unless and until the copyright holder explicitly and\nfinally terminates your license, and (b) permanently, if the copyright\nholder fails to notify you of the violation by some reasonable means\nprior to 60 days after the cessation.\n\n Moreover, your license from a particular copyright holder is\nreinstated permanently if the copyright holder notifies you of the\nviolation by some reasonable means, this is the first time you have\nreceived notice of violation of this License (for any work) from that\ncopyright holder, and you cure the violation prior to 30 days after\nyour receipt of the notice.\n\n Termination of your rights under this section does not terminate the\nlicenses of parties who have received copies or rights from you under\nthis License. 
If your rights have been terminated and not permanently\nreinstated, you do not qualify to receive new licenses for the same\nmaterial under section 10.\n\n 9. Acceptance Not Required for Having Copies.\n\n You are not required to accept this License in order to receive or\nrun a copy of the Program. Ancillary propagation of a covered work\noccurring solely as a consequence of using peer-to-peer transmission\nto receive a copy likewise does not require acceptance. However,\nnothing other than this License grants you permission to propagate or\nmodify any covered work. These actions infringe copyright if you do\nnot accept this License. Therefore, by modifying or propagating a\ncovered work, you indicate your acceptance of this License to do so.\n\n 10. Automatic Licensing of Downstream Recipients.\n\n Each time you convey a covered work, the recipient automatically\nreceives a license from the original licensors, to run, modify and\npropagate that work, subject to this License. You are not responsible\nfor enforcing compliance by third parties with this License.\n\n An "entity transaction" is a transaction transferring control of an\norganization, or substantially all assets of one, or subdividing an\norganization, or merging organizations. If propagation of a covered\nwork results from an entity transaction, each party to that\ntransaction who receives a copy of the work also receives whatever\nlicenses to the work the party's predecessor in interest had or could\ngive under the previous paragraph, plus a right to possession of the\nCorresponding Source of the work from the predecessor in interest, if\nthe predecessor has it or can get it with reasonable efforts.\n\n You may not impose any further restrictions on the exercise of the\nrights granted or affirmed under this License. 
For example, you may\nnot impose a license fee, royalty, or other charge for exercise of\nrights granted under this License, and you may not initiate litigation\n(including a cross-claim or counterclaim in a lawsuit) alleging that\nany patent claim is infringed by making, using, selling, offering for\nsale, or importing the Program or any portion of it.\n\n 11. Patents.\n\n A "contributor" is a copyright holder who authorizes use under this\nLicense of the Program or a work on which the Program is based. The\nwork thus licensed is called the contributor's "contributor version".\n\n A contributor's "essential patent claims" are all patent claims\nowned or controlled by the contributor, whether already acquired or\nhereafter acquired, that would be infringed by some manner, permitted\nby this License, of making, using, or selling its contributor version,\nbut do not include claims that would be infringed only as a\nconsequence of further modification of the contributor version. For\npurposes of this definition, "control" includes the right to grant\npatent sublicenses in a manner consistent with the requirements of\nthis License.\n\n Each contributor grants you a non-exclusive, worldwide, royalty-free\npatent license under the contributor's essential patent claims, to\nmake, use, sell, offer for sale, import and otherwise run, modify and\npropagate the contents of its contributor version.\n\n In the following three paragraphs, a "patent license" is any express\nagreement or commitment, however denominated, not to enforce a patent\n(such as an express permission to practice a patent or covenant not to\nsue for patent infringement). 
To "grant" such a patent license to a\nparty means to make such an agreement or commitment not to enforce a\npatent against the party.\n\n If you convey a covered work, knowingly relying on a patent license,\nand the Corresponding Source of the work is not available for anyone\nto copy, free of charge and under the terms of this License, through a\npublicly available network server or other readily accessible means,\nthen you must either (1) cause the Corresponding Source to be so\navailable, or (2) arrange to deprive yourself of the benefit of the\npatent license for this particular work, or (3) arrange, in a manner\nconsistent with the requirements of this License, to extend the patent\nlicense to downstream recipients. "Knowingly relying" means you have\nactual knowledge that, but for the patent license, your conveying the\ncovered work in a country, or your recipient's use of the covered work\nin a country, would infringe one or more identifiable patents in that\ncountry that you have reason to believe are valid.\n\n If, pursuant to or in connection with a single transaction or\narrangement, you convey, or propagate by procuring conveyance of, a\ncovered work, and grant a patent license to some of the parties\nreceiving the covered work authorizing them to use, propagate, modify\nor convey a specific copy of the covered work, then the patent license\nyou grant is automatically extended to all recipients of the covered\nwork and works based on it.\n\n A patent license is "discriminatory" if it does not include within\nthe scope of its coverage, prohibits the exercise of, or is\nconditioned on the non-exercise of one or more of the rights that are\nspecifically granted under this License. 
You may not convey a covered\nwork if you are a party to an arrangement with a third party that is\nin the business of distributing software, under which you make payment\nto the third party based on the extent of your activity of conveying\nthe work, and under which the third party grants, to any of the\nparties who would receive the covered work from you, a discriminatory\npatent license (a) in connection with copies of the covered work\nconveyed by you (or copies made from those copies), or (b) primarily\nfor and in connection with specific products or compilations that\ncontain the covered work, unless you entered into that arrangement,\nor that patent license was granted, prior to 28 March 2007.\n\n Nothing in this License shall be construed as excluding or limiting\nany implied license or other defenses to infringement that may\notherwise be available to you under applicable patent law.\n\n 12. No Surrender of Others' Freedom.\n\n If conditions are imposed on you (whether by court order, agreement or\notherwise) that contradict the conditions of this License, they do not\nexcuse you from the conditions of this License. If you cannot convey a\ncovered work so as to satisfy simultaneously your obligations under this\nLicense and any other pertinent obligations, then as a consequence you may\nnot convey it at all. For example, if you agree to terms that obligate you\nto collect a royalty for further conveying from those to whom you convey\nthe Program, the only way you could satisfy both those terms and this\nLicense would be to refrain entirely from conveying the Program.\n\n 13. Use with the GNU Affero General Public License.\n\n Notwithstanding any other provision of this License, you have\npermission to link or combine any covered work with a work licensed\nunder version 3 of the GNU Affero General Public License into a single\ncombined work, and to convey the resulting work. 
The terms of this\nLicense will continue to apply to the part which is the covered work,\nbut the special requirements of the GNU Affero General Public License,\nsection 13, concerning interaction through a network will apply to the\ncombination as such.\n\n 14. Revised Versions of this License.\n\n The Free Software Foundation may publish revised and/or new versions of\nthe GNU General Public License from time to time. Such new versions will\nbe similar in spirit to the present version, but may differ in detail to\naddress new problems or concerns.\n\n Each version is given a distinguishing version number. If the\nProgram specifies that a certain numbered version of the GNU General\nPublic License "or any later version" applies to it, you have the\noption of following the terms and conditions either of that numbered\nversion or of any later version published by the Free Software\nFoundation. If the Program does not specify a version number of the\nGNU General Public License, you may choose any version ever published\nby the Free Software Foundation.\n\n If the Program specifies that a proxy can decide which future\nversions of the GNU General Public License can be used, that proxy's\npublic statement of acceptance of a version permanently authorizes you\nto choose that version for the Program.\n\n Later license versions may give you additional or different\npermissions. However, no additional obligations are imposed on any\nauthor or copyright holder as a result of your choosing to follow a\nlater version.\n\n 15. Disclaimer of Warranty.\n\n THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY\nAPPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT\nHOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY\nOF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,\nTHE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR\nPURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM\nIS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF\nALL NECESSARY SERVICING, REPAIR OR CORRECTION.\n\n 16. Limitation of Liability.\n\n IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\nWILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS\nTHE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY\nGENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE\nUSE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF\nDATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD\nPARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),\nEVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF\nSUCH DAMAGES.\n\n 17. Interpretation of Sections 15 and 16.\n\n If the disclaimer of warranty and limitation of liability provided\nabove cannot be given local legal effect according to their terms,\nreviewing courts shall apply local law that most closely approximates\nan absolute waiver of all civil liability in connection with the\nProgram, unless a warranty or assumption of liability accompanies a\ncopy of the Program in return for a fee.\n\n END OF TERMS AND CONDITIONS\n\n How to Apply These Terms to Your New Programs\n\n If you develop a new program, and you want it to be of the greatest\npossible use to the public, the best way to achieve this is to make it\nfree software which everyone can redistribute and change under these terms.\n\n To do so, attach the following notices to the program. 
It is safest\nto attach them to the start of each source file to most effectively\nstate the exclusion of warranty; and each file should have at least\nthe "copyright" line and a pointer to where the full notice is found.\n\n <one line to give the program's name and a brief idea of what it does.>\n Copyright (C) <year> <name of author>\n\n This program is free software: you can redistribute it and/or modify\n it under the terms of the GNU General Public License as published by\n the Free Software Foundation, either version 3 of the License, or\n (at your option) any later version.\n\n This program is distributed in the hope that it will be useful,\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n GNU General Public License for more details.\n\n You should have received a copy of the GNU General Public License\n along with this program. If not, see <https://www.gnu.org/licenses/>.\n\nAlso add information on how to contact you by electronic and paper mail.\n\n If the program does terminal interaction, make it output a short\nnotice like this when it starts in an interactive mode:\n\n <program> Copyright (C) <year> <name of author>\n This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\n This is free software, and you are welcome to redistribute it\n under certain conditions; type `show c' for details.\n\nThe hypothetical commands `show w' and `show c' should show the appropriate\nparts of the General Public License. 
Of course, your program's commands\nmight be different; for a GUI interface, you would use an "about box".\n\n You should also get your employer (if you work as a programmer) or school,\nif any, to sign a "copyright disclaimer" for the program, if necessary.\nFor more information on this, and how to apply and follow the GNU GPL, see\n<https://www.gnu.org/licenses/>.\n\n The GNU General Public License does not permit incorporating your program\ninto proprietary programs. If your program is a subroutine library, you\nmay consider it more useful to permit linking proprietary applications with\nthe library. If this is what you want to do, use the GNU Lesser General\nPublic License instead of this License. But first, please read\n<https://www.gnu.org/licenses/why-not-lgpl.html>.\n\n
.venv\Lib\site-packages\numpy-2.3.1.dist-info\LICENSE.txt
LICENSE.txt
Other
47,722
0.95
0.116842
0.003916
python-kit
439
2024-06-15T08:02:44.973332
Apache-2.0
false
f32d19840e1856e798229fd7ef142928
Metadata-Version: 2.1\nName: numpy\nVersion: 2.3.1\nSummary: Fundamental package for array computing in Python\nAuthor: Travis E. Oliphant et al.\nMaintainer-Email: NumPy Developers <numpy-discussion@python.org>\nLicense: Copyright (c) 2005-2025, NumPy Developers.\n All rights reserved.\n \n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions are\n met:\n \n * Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n \n * Redistributions in binary form must reproduce the above\n copyright notice, this list of conditions and the following\n disclaimer in the documentation and/or other materials provided\n with the distribution.\n \n * Neither the name of the NumPy Developers nor the names of any\n contributors may be used to endorse or promote products derived\n from this software without specific prior written permission.\n \n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n \n ----\n \n The NumPy repository and source distributions bundle several libraries that are\n compatibly licensed. 
We list these here.\n \n Name: lapack-lite\n Files: numpy/linalg/lapack_lite/*\n License: BSD-3-Clause\n For details, see numpy/linalg/lapack_lite/LICENSE.txt\n \n Name: dragon4\n Files: numpy/_core/src/multiarray/dragon4.c\n License: MIT\n For license text, see numpy/_core/src/multiarray/dragon4.c\n \n Name: libdivide\n Files: numpy/_core/include/numpy/libdivide/*\n License: Zlib\n For license text, see numpy/_core/include/numpy/libdivide/LICENSE.txt\n \n \n Note that the following files are vendored in the repository and sdist but not\n installed in built numpy packages:\n \n Name: Meson\n Files: vendored-meson/meson/*\n License: Apache 2.0\n For license text, see vendored-meson/meson/COPYING\n \n Name: spin\n Files: .spin/cmds.py\n License: BSD-3\n For license text, see .spin/LICENSE\n \n Name: tempita\n Files: numpy/_build_utils/tempita/*\n License: MIT\n For details, see numpy/_build_utils/tempita/LICENCE.txt\n \n ----\n \n This binary distribution of NumPy also bundles the following software:\n \n \n Name: OpenBLAS\n Files: numpy.libs\libscipy_openblas*.dll\n Description: bundled as a dynamically linked library\n Availability: https://github.com/OpenMathLib/OpenBLAS/\n License: BSD-3-Clause\n Copyright (c) 2011-2014, The OpenBLAS Project\n All rights reserved.\n \n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions are\n met:\n \n 1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n \n 2. Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in\n the documentation and/or other materials provided with the\n distribution.\n 3. 
Neither the name of the OpenBLAS project nor the names of\n its contributors may be used to endorse or promote products\n derived from this software without specific prior written\n permission.\n \n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\n AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE\n LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE\n USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n \n \n Name: LAPACK\n Files: numpy.libs\libscipy_openblas*.dll\n Description: bundled in OpenBLAS\n Availability: https://github.com/OpenMathLib/OpenBLAS/\n License: BSD-3-Clause-Open-MPI\n Copyright (c) 1992-2013 The University of Tennessee and The University\n of Tennessee Research Foundation. All rights\n reserved.\n Copyright (c) 2000-2013 The University of California Berkeley. All\n rights reserved.\n Copyright (c) 2006-2013 The University of Colorado Denver. 
All rights\n reserved.\n \n $COPYRIGHT$\n \n Additional copyrights may follow\n \n $HEADER$\n \n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions are\n met:\n \n - Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n \n - Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer listed\n in this license in the documentation and/or other materials\n provided with the distribution.\n \n - Neither the name of the copyright holders nor the names of its\n contributors may be used to endorse or promote products derived from\n this software without specific prior written permission.\n \n The copyright holders provide no reassurances that the source code\n provided does not infringe any patent, copyright, or any other\n intellectual property rights of third parties. The copyright holders\n disclaim any liability to any recipient for claims brought against\n recipient by any third party for infringement of that parties\n intellectual property rights.\n \n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n \n \n Name: GCC runtime library\n Files: numpy.libs\libscipy_openblas*.dll\n Description: statically linked to files compiled with gcc\n Availability: https://gcc.gnu.org/git/?p=gcc.git;a=tree;f=libgfortran\n License: GPL-3.0-or-later WITH GCC-exception-3.1\n Copyright (C) 2002-2017 Free Software Foundation, Inc.\n \n Libgfortran is free software; you can redistribute it and/or modify\n it under the terms of the GNU General Public License as published by\n the Free Software Foundation; either version 3, or (at your option)\n any later version.\n \n Libgfortran is distributed in the hope that it will be useful,\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n GNU General Public License for more details.\n \n Under Section 7 of GPL version 3, you are granted additional\n permissions described in the GCC Runtime Library Exception, version\n 3.1, as published by the Free Software Foundation.\n \n You should have received a copy of the GNU General Public License and\n a copy of the GCC Runtime Library Exception along with this program;\n see the files COPYING3 and COPYING.RUNTIME respectively. 
If not, see\n <http://www.gnu.org/licenses/>.\n \n ----\n \n Full text of license texts referred to above follows (that they are\n listed below does not necessarily imply the conditions apply to the\n present binary release):\n \n ----\n \n GCC RUNTIME LIBRARY EXCEPTION\n \n Version 3.1, 31 March 2009\n \n Copyright (C) 2009 Free Software Foundation, Inc. <https://fsf.org/>\n \n Everyone is permitted to copy and distribute verbatim copies of this\n license document, but changing it is not allowed.\n \n This GCC Runtime Library Exception ("Exception") is an additional\n permission under section 7 of the GNU General Public License, version\n 3 ("GPLv3"). It applies to a given file (the "Runtime Library") that\n bears a notice placed by the copyright holder of the file stating that\n the file is governed by GPLv3 along with this Exception.\n \n When you use GCC to compile a program, GCC may combine portions of\n certain GCC header files and runtime libraries with the compiled\n program. The purpose of this Exception is to allow compilation of\n non-GPL (including proprietary) programs to use, in this way, the\n header files and runtime libraries covered by this Exception.\n \n 0. 
Definitions.\n \n A file is an "Independent Module" if it either requires the Runtime\n Library for execution after a Compilation Process, or makes use of an\n interface provided by the Runtime Library, but is not otherwise based\n on the Runtime Library.\n \n "GCC" means a version of the GNU Compiler Collection, with or without\n modifications, governed by version 3 (or a specified later version) of\n the GNU General Public License (GPL) with the option of using any\n subsequent versions published by the FSF.\n \n "GPL-compatible Software" is software whose conditions of propagation,\n modification and use would permit combination with GCC in accord with\n the license of GCC.\n \n "Target Code" refers to output from any compiler for a real or virtual\n target processor architecture, in executable form or suitable for\n input to an assembler, loader, linker and/or execution\n phase. Notwithstanding that, Target Code does not include data in any\n format that is used as a compiler intermediate representation, or used\n for producing a compiler intermediate representation.\n \n The "Compilation Process" transforms code entirely represented in\n non-intermediate languages designed for human-written code, and/or in\n Java Virtual Machine byte code, into Target Code. Thus, for example,\n use of source code generators and preprocessors need not be considered\n part of the Compilation Process, since the Compilation Process can be\n understood as starting with the output of the generators or\n preprocessors.\n \n A Compilation Process is "Eligible" if it is done using GCC, alone or\n with other GPL-compatible software, or if it is done without using any\n work based on GCC. For example, using non-GPL-compatible Software to\n optimize any GCC intermediate representations would not qualify as an\n Eligible Compilation Process.\n \n 1. 
Grant of Additional Permission.\n \n You have permission to propagate a work of Target Code formed by\n combining the Runtime Library with Independent Modules, even if such\n propagation would otherwise violate the terms of GPLv3, provided that\n all Target Code was generated by Eligible Compilation Processes. You\n may then convey such a combination under terms of your choice,\n consistent with the licensing of the Independent Modules.\n \n 2. No Weakening of GCC Copyleft.\n \n The availability of this Exception does not imply any general\n presumption that third-party software is unaffected by the copyleft\n requirements of the license of GCC.\n \n ----\n \n GNU GENERAL PUBLIC LICENSE\n Version 3, 29 June 2007\n \n Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>\n Everyone is permitted to copy and distribute verbatim copies\n of this license document, but changing it is not allowed.\n \n Preamble\n \n The GNU General Public License is a free, copyleft license for\n software and other kinds of works.\n \n The licenses for most software and other practical works are designed\n to take away your freedom to share and change the works. By contrast,\n the GNU General Public License is intended to guarantee your freedom to\n share and change all versions of a program--to make sure it remains free\n software for all its users. We, the Free Software Foundation, use the\n GNU General Public License for most of our software; it applies also to\n any other work released this way by its authors. You can apply it to\n your programs, too.\n \n When we speak of free software, we are referring to freedom, not\n price. 
Our General Public Licenses are designed to make sure that you\n have the freedom to distribute copies of free software (and charge for\n them if you wish), that you receive source code or can get it if you\n want it, that you can change the software or use pieces of it in new\n free programs, and that you know you can do these things.\n \n To protect your rights, we need to prevent others from denying you\n these rights or asking you to surrender the rights. Therefore, you have\n certain responsibilities if you distribute copies of the software, or if\n you modify it: responsibilities to respect the freedom of others.\n \n For example, if you distribute copies of such a program, whether\n gratis or for a fee, you must pass on to the recipients the same\n freedoms that you received. You must make sure that they, too, receive\n or can get the source code. And you must show them these terms so they\n know their rights.\n \n Developers that use the GNU GPL protect your rights with two steps:\n (1) assert copyright on the software, and (2) offer you this License\n giving you legal permission to copy, distribute and/or modify it.\n \n For the developers' and authors' protection, the GPL clearly explains\n that there is no warranty for this free software. For both users' and\n authors' sake, the GPL requires that modified versions be marked as\n changed, so that their problems will not be attributed erroneously to\n authors of previous versions.\n \n Some devices are designed to deny users access to install or run\n modified versions of the software inside them, although the manufacturer\n can do so. This is fundamentally incompatible with the aim of\n protecting users' freedom to change the software. The systematic\n pattern of such abuse occurs in the area of products for individuals to\n use, which is precisely where it is most unacceptable. Therefore, we\n have designed this version of the GPL to prohibit the practice for those\n products. 
If such problems arise substantially in other domains, we\n stand ready to extend this provision to those domains in future versions\n of the GPL, as needed to protect the freedom of users.\n \n Finally, every program is threatened constantly by software patents.\n States should not allow patents to restrict development and use of\n software on general-purpose computers, but in those that do, we wish to\n avoid the special danger that patents applied to a free program could\n make it effectively proprietary. To prevent this, the GPL assures that\n patents cannot be used to render the program non-free.\n \n The precise terms and conditions for copying, distribution and\n modification follow.\n \n TERMS AND CONDITIONS\n \n 0. Definitions.\n \n "This License" refers to version 3 of the GNU General Public License.\n \n "Copyright" also means copyright-like laws that apply to other kinds of\n works, such as semiconductor masks.\n \n "The Program" refers to any copyrightable work licensed under this\n License. Each licensee is addressed as "you". "Licensees" and\n "recipients" may be individuals or organizations.\n \n To "modify" a work means to copy from or adapt all or part of the work\n in a fashion requiring copyright permission, other than the making of an\n exact copy. The resulting work is called a "modified version" of the\n earlier work or a work "based on" the earlier work.\n \n A "covered work" means either the unmodified Program or a work based\n on the Program.\n \n To "propagate" a work means to do anything with it that, without\n permission, would make you directly or secondarily liable for\n infringement under applicable copyright law, except executing it on a\n computer or modifying a private copy. 
Propagation includes copying,\n distribution (with or without modification), making available to the\n public, and in some countries other activities as well.\n \n To "convey" a work means any kind of propagation that enables other\n parties to make or receive copies. Mere interaction with a user through\n a computer network, with no transfer of a copy, is not conveying.\n \n An interactive user interface displays "Appropriate Legal Notices"\n to the extent that it includes a convenient and prominently visible\n feature that (1) displays an appropriate copyright notice, and (2)\n tells the user that there is no warranty for the work (except to the\n extent that warranties are provided), that licensees may convey the\n work under this License, and how to view a copy of this License. If\n the interface presents a list of user commands or options, such as a\n menu, a prominent item in the list meets this criterion.\n \n 1. Source Code.\n \n The "source code" for a work means the preferred form of the work\n for making modifications to it. "Object code" means any non-source\n form of a work.\n \n A "Standard Interface" means an interface that either is an official\n standard defined by a recognized standards body, or, in the case of\n interfaces specified for a particular programming language, one that\n is widely used among developers working in that language.\n \n The "System Libraries" of an executable work include anything, other\n than the work as a whole, that (a) is included in the normal form of\n packaging a Major Component, but which is not part of that Major\n Component, and (b) serves only to enable use of the work with that\n Major Component, or to implement a Standard Interface for which an\n implementation is available to the public in source code form. 
A\n "Major Component", in this context, means a major essential component\n (kernel, window system, and so on) of the specific operating system\n (if any) on which the executable work runs, or a compiler used to\n produce the work, or an object code interpreter used to run it.\n \n The "Corresponding Source" for a work in object code form means all\n the source code needed to generate, install, and (for an executable\n work) run the object code and to modify the work, including scripts to\n control those activities. However, it does not include the work's\n System Libraries, or general-purpose tools or generally available free\n programs which are used unmodified in performing those activities but\n which are not part of the work. For example, Corresponding Source\n includes interface definition files associated with source files for\n the work, and the source code for shared libraries and dynamically\n linked subprograms that the work is specifically designed to require,\n such as by intimate data communication or control flow between those\n subprograms and other parts of the work.\n \n The Corresponding Source need not include anything that users\n can regenerate automatically from other parts of the Corresponding\n Source.\n \n The Corresponding Source for a work in source code form is that\n same work.\n \n 2. Basic Permissions.\n \n All rights granted under this License are granted for the term of\n copyright on the Program, and are irrevocable provided the stated\n conditions are met. This License explicitly affirms your unlimited\n permission to run the unmodified Program. The output from running a\n covered work is covered by this License only if the output, given its\n content, constitutes a covered work. This License acknowledges your\n rights of fair use or other equivalent, as provided by copyright law.\n \n You may make, run and propagate covered works that you do not\n convey, without conditions so long as your license otherwise remains\n in force. 
You may convey covered works to others for the sole purpose\n of having them make modifications exclusively for you, or provide you\n with facilities for running those works, provided that you comply with\n the terms of this License in conveying all material for which you do\n not control copyright. Those thus making or running the covered works\n for you must do so exclusively on your behalf, under your direction\n and control, on terms that prohibit them from making any copies of\n your copyrighted material outside their relationship with you.\n \n Conveying under any other circumstances is permitted solely under\n the conditions stated below. Sublicensing is not allowed; section 10\n makes it unnecessary.\n \n 3. Protecting Users' Legal Rights From Anti-Circumvention Law.\n \n No covered work shall be deemed part of an effective technological\n measure under any applicable law fulfilling obligations under article\n 11 of the WIPO copyright treaty adopted on 20 December 1996, or\n similar laws prohibiting or restricting circumvention of such\n measures.\n \n When you convey a covered work, you waive any legal power to forbid\n circumvention of technological measures to the extent such circumvention\n is effected by exercising rights under this License with respect to\n the covered work, and you disclaim any intention to limit operation or\n modification of the work as a means of enforcing, against the work's\n users, your or third parties' legal rights to forbid circumvention of\n technological measures.\n \n 4. 
Conveying Verbatim Copies.\n \n You may convey verbatim copies of the Program's source code as you\n receive it, in any medium, provided that you conspicuously and\n appropriately publish on each copy an appropriate copyright notice;\n keep intact all notices stating that this License and any\n non-permissive terms added in accord with section 7 apply to the code;\n keep intact all notices of the absence of any warranty; and give all\n recipients a copy of this License along with the Program.\n \n You may charge any price or no price for each copy that you convey,\n and you may offer support or warranty protection for a fee.\n \n 5. Conveying Modified Source Versions.\n \n You may convey a work based on the Program, or the modifications to\n produce it from the Program, in the form of source code under the\n terms of section 4, provided that you also meet all of these conditions:\n \n a) The work must carry prominent notices stating that you modified\n it, and giving a relevant date.\n \n b) The work must carry prominent notices stating that it is\n released under this License and any conditions added under section\n 7. This requirement modifies the requirement in section 4 to\n "keep intact all notices".\n \n c) You must license the entire work, as a whole, under this\n License to anyone who comes into possession of a copy. This\n License will therefore apply, along with any applicable section 7\n additional terms, to the whole of the work, and all its parts,\n regardless of how they are packaged. 
This License gives no\n permission to license the work in any other way, but it does not\n invalidate such permission if you have separately received it.\n \n d) If the work has interactive user interfaces, each must display\n Appropriate Legal Notices; however, if the Program has interactive\n interfaces that do not display Appropriate Legal Notices, your\n work need not make them do so.\n \n A compilation of a covered work with other separate and independent\n works, which are not by their nature extensions of the covered work,\n and which are not combined with it such as to form a larger program,\n in or on a volume of a storage or distribution medium, is called an\n "aggregate" if the compilation and its resulting copyright are not\n used to limit the access or legal rights of the compilation's users\n beyond what the individual works permit. Inclusion of a covered work\n in an aggregate does not cause this License to apply to the other\n parts of the aggregate.\n \n 6. Conveying Non-Source Forms.\n \n You may convey a covered work in object code form under the terms\n of sections 4 and 5, provided that you also convey the\n machine-readable Corresponding Source under the terms of this License,\n in one of these ways:\n \n a) Convey the object code in, or embodied in, a physical product\n (including a physical distribution medium), accompanied by the\n Corresponding Source fixed on a durable physical medium\n customarily used for software interchange.\n \n b) Convey the object code in, or embodied in, a physical product\n (including a physical distribution medium), accompanied by a\n written offer, valid for at least three years and valid for as\n long as you offer spare parts or customer support for that product\n model, to give anyone who possesses the object code either (1) a\n copy of the Corresponding Source for all the software in the\n product that is covered by this License, on a durable physical\n medium customarily used for software interchange, for a 
price no\n more than your reasonable cost of physically performing this\n conveying of source, or (2) access to copy the\n Corresponding Source from a network server at no charge.\n \n c) Convey individual copies of the object code with a copy of the\n written offer to provide the Corresponding Source. This\n alternative is allowed only occasionally and noncommercially, and\n only if you received the object code with such an offer, in accord\n with subsection 6b.\n \n d) Convey the object code by offering access from a designated\n place (gratis or for a charge), and offer equivalent access to the\n Corresponding Source in the same way through the same place at no\n further charge. You need not require recipients to copy the\n Corresponding Source along with the object code. If the place to\n copy the object code is a network server, the Corresponding Source\n may be on a different server (operated by you or a third party)\n that supports equivalent copying facilities, provided you maintain\n clear directions next to the object code saying where to find the\n Corresponding Source. Regardless of what server hosts the\n Corresponding Source, you remain obligated to ensure that it is\n available for as long as needed to satisfy these requirements.\n \n e) Convey the object code using peer-to-peer transmission, provided\n you inform other peers where the object code and Corresponding\n Source of the work are being offered to the general public at no\n charge under subsection 6d.\n \n A separable portion of the object code, whose source code is excluded\n from the Corresponding Source as a System Library, need not be\n included in conveying the object code work.\n \n A "User Product" is either (1) a "consumer product", which means any\n tangible personal property which is normally used for personal, family,\n or household purposes, or (2) anything designed or sold for incorporation\n into a dwelling. 
In determining whether a product is a consumer product,\n doubtful cases shall be resolved in favor of coverage. For a particular\n product received by a particular user, "normally used" refers to a\n typical or common use of that class of product, regardless of the status\n of the particular user or of the way in which the particular user\n actually uses, or expects or is expected to use, the product. A product\n is a consumer product regardless of whether the product has substantial\n commercial, industrial or non-consumer uses, unless such uses represent\n the only significant mode of use of the product.\n \n "Installation Information" for a User Product means any methods,\n procedures, authorization keys, or other information required to install\n and execute modified versions of a covered work in that User Product from\n a modified version of its Corresponding Source. The information must\n suffice to ensure that the continued functioning of the modified object\n code is in no case prevented or interfered with solely because\n modification has been made.\n \n If you convey an object code work under this section in, or with, or\n specifically for use in, a User Product, and the conveying occurs as\n part of a transaction in which the right of possession and use of the\n User Product is transferred to the recipient in perpetuity or for a\n fixed term (regardless of how the transaction is characterized), the\n Corresponding Source conveyed under this section must be accompanied\n by the Installation Information. 
But this requirement does not apply\n if neither you nor any third party retains the ability to install\n modified object code on the User Product (for example, the work has\n been installed in ROM).\n \n The requirement to provide Installation Information does not include a\n requirement to continue to provide support service, warranty, or updates\n for a work that has been modified or installed by the recipient, or for\n the User Product in which it has been modified or installed. Access to a\n network may be denied when the modification itself materially and\n adversely affects the operation of the network or violates the rules and\n protocols for communication across the network.\n \n Corresponding Source conveyed, and Installation Information provided,\n in accord with this section must be in a format that is publicly\n documented (and with an implementation available to the public in\n source code form), and must require no special password or key for\n unpacking, reading or copying.\n \n 7. Additional Terms.\n \n "Additional permissions" are terms that supplement the terms of this\n License by making exceptions from one or more of its conditions.\n Additional permissions that are applicable to the entire Program shall\n be treated as though they were included in this License, to the extent\n that they are valid under applicable law. If additional permissions\n apply only to part of the Program, that part may be used separately\n under those permissions, but the entire Program remains governed by\n this License without regard to the additional permissions.\n \n When you convey a copy of a covered work, you may at your option\n remove any additional permissions from that copy, or from any part of\n it. (Additional permissions may be written to require their own\n removal in certain cases when you modify the work.) 
You may place\n additional permissions on material, added by you to a covered work,\n for which you have or can give appropriate copyright permission.\n \n Notwithstanding any other provision of this License, for material you\n add to a covered work, you may (if authorized by the copyright holders of\n that material) supplement the terms of this License with terms:\n \n a) Disclaiming warranty or limiting liability differently from the\n terms of sections 15 and 16 of this License; or\n \n b) Requiring preservation of specified reasonable legal notices or\n author attributions in that material or in the Appropriate Legal\n Notices displayed by works containing it; or\n \n c) Prohibiting misrepresentation of the origin of that material, or\n requiring that modified versions of such material be marked in\n reasonable ways as different from the original version; or\n \n d) Limiting the use for publicity purposes of names of licensors or\n authors of the material; or\n \n e) Declining to grant rights under trademark law for use of some\n trade names, trademarks, or service marks; or\n \n f) Requiring indemnification of licensors and authors of that\n material by anyone who conveys the material (or modified versions of\n it) with contractual assumptions of liability to the recipient, for\n any liability that these contractual assumptions directly impose on\n those licensors and authors.\n \n All other non-permissive additional terms are considered "further\n restrictions" within the meaning of section 10. If the Program as you\n received it, or any part of it, contains a notice stating that it is\n governed by this License along with a term that is a further\n restriction, you may remove that term. 
If a license document contains\n a further restriction but permits relicensing or conveying under this\n License, you may add to a covered work material governed by the terms\n of that license document, provided that the further restriction does\n not survive such relicensing or conveying.\n \n If you add terms to a covered work in accord with this section, you\n must place, in the relevant source files, a statement of the\n additional terms that apply to those files, or a notice indicating\n where to find the applicable terms.\n \n Additional terms, permissive or non-permissive, may be stated in the\n form of a separately written license, or stated as exceptions;\n the above requirements apply either way.\n \n 8. Termination.\n \n You may not propagate or modify a covered work except as expressly\n provided under this License. Any attempt otherwise to propagate or\n modify it is void, and will automatically terminate your rights under\n this License (including any patent licenses granted under the third\n paragraph of section 11).\n \n However, if you cease all violation of this License, then your\n license from a particular copyright holder is reinstated (a)\n provisionally, unless and until the copyright holder explicitly and\n finally terminates your license, and (b) permanently, if the copyright\n holder fails to notify you of the violation by some reasonable means\n prior to 60 days after the cessation.\n \n Moreover, your license from a particular copyright holder is\n reinstated permanently if the copyright holder notifies you of the\n violation by some reasonable means, this is the first time you have\n received notice of violation of this License (for any work) from that\n copyright holder, and you cure the violation prior to 30 days after\n your receipt of the notice.\n \n Termination of your rights under this section does not terminate the\n licenses of parties who have received copies or rights from you under\n this License. 
If your rights have been terminated and not permanently\n reinstated, you do not qualify to receive new licenses for the same\n material under section 10.\n \n 9. Acceptance Not Required for Having Copies.\n \n You are not required to accept this License in order to receive or\n run a copy of the Program. Ancillary propagation of a covered work\n occurring solely as a consequence of using peer-to-peer transmission\n to receive a copy likewise does not require acceptance. However,\n nothing other than this License grants you permission to propagate or\n modify any covered work. These actions infringe copyright if you do\n not accept this License. Therefore, by modifying or propagating a\n covered work, you indicate your acceptance of this License to do so.\n \n 10. Automatic Licensing of Downstream Recipients.\n \n Each time you convey a covered work, the recipient automatically\n receives a license from the original licensors, to run, modify and\n propagate that work, subject to this License. You are not responsible\n for enforcing compliance by third parties with this License.\n \n An "entity transaction" is a transaction transferring control of an\n organization, or substantially all assets of one, or subdividing an\n organization, or merging organizations. If propagation of a covered\n work results from an entity transaction, each party to that\n transaction who receives a copy of the work also receives whatever\n licenses to the work the party's predecessor in interest had or could\n give under the previous paragraph, plus a right to possession of the\n Corresponding Source of the work from the predecessor in interest, if\n the predecessor has it or can get it with reasonable efforts.\n \n You may not impose any further restrictions on the exercise of the\n rights granted or affirmed under this License. 
For example, you may\n not impose a license fee, royalty, or other charge for exercise of\n rights granted under this License, and you may not initiate litigation\n (including a cross-claim or counterclaim in a lawsuit) alleging that\n any patent claim is infringed by making, using, selling, offering for\n sale, or importing the Program or any portion of it.\n \n 11. Patents.\n \n A "contributor" is a copyright holder who authorizes use under this\n License of the Program or a work on which the Program is based. The\n work thus licensed is called the contributor's "contributor version".\n \n A contributor's "essential patent claims" are all patent claims\n owned or controlled by the contributor, whether already acquired or\n hereafter acquired, that would be infringed by some manner, permitted\n by this License, of making, using, or selling its contributor version,\n but do not include claims that would be infringed only as a\n consequence of further modification of the contributor version. For\n purposes of this definition, "control" includes the right to grant\n patent sublicenses in a manner consistent with the requirements of\n this License.\n \n Each contributor grants you a non-exclusive, worldwide, royalty-free\n patent license under the contributor's essential patent claims, to\n make, use, sell, offer for sale, import and otherwise run, modify and\n propagate the contents of its contributor version.\n \n In the following three paragraphs, a "patent license" is any express\n agreement or commitment, however denominated, not to enforce a patent\n (such as an express permission to practice a patent or covenant not to\n sue for patent infringement). 
To "grant" such a patent license to a\n party means to make such an agreement or commitment not to enforce a\n patent against the party.\n \n If you convey a covered work, knowingly relying on a patent license,\n and the Corresponding Source of the work is not available for anyone\n to copy, free of charge and under the terms of this License, through a\n publicly available network server or other readily accessible means,\n then you must either (1) cause the Corresponding Source to be so\n available, or (2) arrange to deprive yourself of the benefit of the\n patent license for this particular work, or (3) arrange, in a manner\n consistent with the requirements of this License, to extend the patent\n license to downstream recipients. "Knowingly relying" means you have\n actual knowledge that, but for the patent license, your conveying the\n covered work in a country, or your recipient's use of the covered work\n in a country, would infringe one or more identifiable patents in that\n country that you have reason to believe are valid.\n \n If, pursuant to or in connection with a single transaction or\n arrangement, you convey, or propagate by procuring conveyance of, a\n covered work, and grant a patent license to some of the parties\n receiving the covered work authorizing them to use, propagate, modify\n or convey a specific copy of the covered work, then the patent license\n you grant is automatically extended to all recipients of the covered\n work and works based on it.\n \n A patent license is "discriminatory" if it does not include within\n the scope of its coverage, prohibits the exercise of, or is\n conditioned on the non-exercise of one or more of the rights that are\n specifically granted under this License. 
You may not convey a covered\n work if you are a party to an arrangement with a third party that is\n in the business of distributing software, under which you make payment\n to the third party based on the extent of your activity of conveying\n the work, and under which the third party grants, to any of the\n parties who would receive the covered work from you, a discriminatory\n patent license (a) in connection with copies of the covered work\n conveyed by you (or copies made from those copies), or (b) primarily\n for and in connection with specific products or compilations that\n contain the covered work, unless you entered into that arrangement,\n or that patent license was granted, prior to 28 March 2007.\n \n Nothing in this License shall be construed as excluding or limiting\n any implied license or other defenses to infringement that may\n otherwise be available to you under applicable patent law.\n \n 12. No Surrender of Others' Freedom.\n \n If conditions are imposed on you (whether by court order, agreement or\n otherwise) that contradict the conditions of this License, they do not\n excuse you from the conditions of this License. If you cannot convey a\n covered work so as to satisfy simultaneously your obligations under this\n License and any other pertinent obligations, then as a consequence you may\n not convey it at all. For example, if you agree to terms that obligate you\n to collect a royalty for further conveying from those to whom you convey\n the Program, the only way you could satisfy both those terms and this\n License would be to refrain entirely from conveying the Program.\n \n 13. Use with the GNU Affero General Public License.\n \n Notwithstanding any other provision of this License, you have\n permission to link or combine any covered work with a work licensed\n under version 3 of the GNU Affero General Public License into a single\n combined work, and to convey the resulting work. 
The terms of this\n License will continue to apply to the part which is the covered work,\n but the special requirements of the GNU Affero General Public License,\n section 13, concerning interaction through a network will apply to the\n combination as such.\n \n 14. Revised Versions of this License.\n \n The Free Software Foundation may publish revised and/or new versions of\n the GNU General Public License from time to time. Such new versions will\n be similar in spirit to the present version, but may differ in detail to\n address new problems or concerns.\n \n Each version is given a distinguishing version number. If the\n Program specifies that a certain numbered version of the GNU General\n Public License "or any later version" applies to it, you have the\n option of following the terms and conditions either of that numbered\n version or of any later version published by the Free Software\n Foundation. If the Program does not specify a version number of the\n GNU General Public License, you may choose any version ever published\n by the Free Software Foundation.\n \n If the Program specifies that a proxy can decide which future\n versions of the GNU General Public License can be used, that proxy's\n public statement of acceptance of a version permanently authorizes you\n to choose that version for the Program.\n \n Later license versions may give you additional or different\n permissions. However, no additional obligations are imposed on any\n author or copyright holder as a result of your choosing to follow a\n later version.\n \n 15. Disclaimer of Warranty.\n \n THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY\n APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT\n HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY\n OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,\n THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR\n PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM\n IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF\n ALL NECESSARY SERVICING, REPAIR OR CORRECTION.\n \n 16. Limitation of Liability.\n \n IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING\n WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS\n THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY\n GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE\n USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF\n DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD\n PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),\n EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF\n SUCH DAMAGES.\n \n 17. Interpretation of Sections 15 and 16.\n \n If the disclaimer of warranty and limitation of liability provided\n above cannot be given local legal effect according to their terms,\n reviewing courts shall apply local law that most closely approximates\n an absolute waiver of all civil liability in connection with the\n Program, unless a warranty or assumption of liability accompanies a\n copy of the Program in return for a fee.\n \n END OF TERMS AND CONDITIONS\n \n How to Apply These Terms to Your New Programs\n \n If you develop a new program, and you want it to be of the greatest\n possible use to the public, the best way to achieve this is to make it\n free software which everyone can redistribute and change under these terms.\n \n To do so, attach the following notices to the program. 
It is safest\n to attach them to the start of each source file to most effectively\n state the exclusion of warranty; and each file should have at least\n the "copyright" line and a pointer to where the full notice is found.\n \n <one line to give the program's name and a brief idea of what it does.>\n Copyright (C) <year> <name of author>\n \n This program is free software: you can redistribute it and/or modify\n it under the terms of the GNU General Public License as published by\n the Free Software Foundation, either version 3 of the License, or\n (at your option) any later version.\n \n This program is distributed in the hope that it will be useful,\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n GNU General Public License for more details.\n \n You should have received a copy of the GNU General Public License\n along with this program. If not, see <https://www.gnu.org/licenses/>.\n \n Also add information on how to contact you by electronic and paper mail.\n \n If the program does terminal interaction, make it output a short\n notice like this when it starts in an interactive mode:\n \n <program> Copyright (C) <year> <name of author>\n This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.\n This is free software, and you are welcome to redistribute it\n under certain conditions; type `show c' for details.\n \n The hypothetical commands `show w' and `show c' should show the appropriate\n parts of the General Public License. 
Of course, your program's commands\n might be different; for a GUI interface, you would use an "about box".\n \n You should also get your employer (if you work as a programmer) or school,\n if any, to sign a "copyright disclaimer" for the program, if necessary.\n For more information on this, and how to apply and follow the GNU GPL, see\n <https://www.gnu.org/licenses/>.\n \n The GNU General Public License does not permit incorporating your program\n into proprietary programs. If your program is a subroutine library, you\n may consider it more useful to permit linking proprietary applications with\n the library. If this is what you want to do, use the GNU Lesser General\n Public License instead of this License. But first, please read\n <https://www.gnu.org/licenses/why-not-lgpl.html>.\n \n \nClassifier: Development Status :: 5 - Production/Stable\nClassifier: Intended Audience :: Science/Research\nClassifier: Intended Audience :: Developers\nClassifier: License :: OSI Approved :: BSD License\nClassifier: Programming Language :: C\nClassifier: Programming Language :: Python\nClassifier: Programming Language :: Python :: 3\nClassifier: Programming Language :: Python :: 3.11\nClassifier: Programming Language :: Python :: 3.12\nClassifier: Programming Language :: Python :: 3.13\nClassifier: Programming Language :: Python :: 3 :: Only\nClassifier: Programming Language :: Python :: Implementation :: CPython\nClassifier: Topic :: Software Development\nClassifier: Topic :: Scientific/Engineering\nClassifier: Typing :: Typed\nClassifier: Operating System :: Microsoft :: Windows\nClassifier: Operating System :: POSIX\nClassifier: Operating System :: Unix\nClassifier: Operating System :: MacOS\nProject-URL: homepage, https://numpy.org\nProject-URL: documentation, https://numpy.org/doc/\nProject-URL: source, https://github.com/numpy/numpy\nProject-URL: download, https://pypi.org/project/numpy/#files\nProject-URL: tracker, https://github.com/numpy/numpy/issues\nProject-URL: 
release notes, https://numpy.org/doc/stable/release\nRequires-Python: >=3.11\nDescription-Content-Type: text/markdown\n\n<h1 align="center">\n<img src="https://raw.githubusercontent.com/numpy/numpy/main/branding/logo/primary/numpylogo.svg" width="300">\n</h1><br>\n\n\n[![Powered by NumFOCUS](https://img.shields.io/badge/powered%20by-NumFOCUS-orange.svg?style=flat&colorA=E1523D&colorB=007D8A)](\nhttps://numfocus.org)\n[![PyPI Downloads](https://img.shields.io/pypi/dm/numpy.svg?label=PyPI%20downloads)](\nhttps://pypi.org/project/numpy/)\n[![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/numpy.svg?label=Conda%20downloads)](\nhttps://anaconda.org/conda-forge/numpy)\n[![Stack Overflow](https://img.shields.io/badge/stackoverflow-Ask%20questions-blue.svg)](\nhttps://stackoverflow.com/questions/tagged/numpy)\n[![Nature Paper](https://img.shields.io/badge/DOI-10.1038%2Fs41586--020--2649--2-blue)](\nhttps://doi.org/10.1038/s41586-020-2649-2)\n[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/numpy/numpy/badge)](https://securityscorecards.dev/viewer/?uri=github.com/numpy/numpy)\n[![Typing](https://img.shields.io/pypi/types/numpy)](https://pypi.org/project/numpy/)\n\n\nNumPy is the fundamental package for scientific computing with Python.\n\n- **Website:** https://numpy.org\n- **Documentation:** https://numpy.org/doc\n- **Mailing list:** https://mail.python.org/mailman/listinfo/numpy-discussion\n- **Source code:** https://github.com/numpy/numpy\n- **Contributing:** https://numpy.org/devdocs/dev/index.html\n- **Bug reports:** https://github.com/numpy/numpy/issues\n- **Report a security vulnerability:** https://tidelift.com/docs/security\n\nIt provides:\n\n- a powerful N-dimensional array object\n- sophisticated (broadcasting) functions\n- tools for integrating C/C++ and Fortran code\n- useful linear algebra, Fourier transform, and random number capabilities\n\nTesting:\n\nNumPy requires `pytest` and `hypothesis`. 
Tests can then be run after installation with:\n\n python -c "import numpy, sys; sys.exit(numpy.test() is False)"\n\nCode of Conduct\n----------------------\n\nNumPy is a community-driven open source project developed by a diverse group of\n[contributors](https://numpy.org/teams/). The NumPy leadership has made a strong\ncommitment to creating an open, inclusive, and positive community. Please read the\n[NumPy Code of Conduct](https://numpy.org/code-of-conduct/) for guidance on how to interact\nwith others in a way that makes our community thrive.\n\nCall for Contributions\n----------------------\n\nThe NumPy project welcomes your expertise and enthusiasm!\n\nSmall improvements or fixes are always appreciated. If you are considering larger contributions\nto the source code, please contact us through the [mailing\nlist](https://mail.python.org/mailman/listinfo/numpy-discussion) first.\n\nWriting code isn’t the only way to contribute to NumPy. You can also:\n- review pull requests\n- help us stay on top of new and old issues\n- develop tutorials, presentations, and other educational materials\n- maintain and improve [our website](https://github.com/numpy/numpy.org)\n- develop graphic design for our brand assets and promotional materials\n- translate website content\n- help with outreach and onboard new contributors\n- write grant proposals and help with other fundraising efforts\n\nFor more information about the ways you can contribute to NumPy, visit [our website](https://numpy.org/contribute/). \nIf you’re unsure where to start or how your skills fit in, reach out! 
You can\nask on the mailing list or here, on GitHub, by opening a new issue or leaving a\ncomment on a relevant issue that is already open.\n\nOur preferred channels of communication are all public, but if you’d like to\nspeak to us in private first, contact our community coordinators at\nnumpy-team@googlegroups.com or on Slack (write numpy-team@googlegroups.com for\nan invitation).\n\nWe also have a biweekly community call, details of which are announced on the\nmailing list. You are very welcome to join.\n\nIf you are new to contributing to open source, [this\nguide](https://opensource.guide/how-to-contribute/) helps explain why, what,\nand how to successfully get involved.\n
.venv\Lib\site-packages\numpy-2.3.1.dist-info\METADATA
METADATA
Other
60,884
0.75
0.111111
0.003468
awesome-app
653
2025-01-25T10:26:38.953425
Apache-2.0
false
30f7207de483c9d37f62dca86303a646
Wheel-Version: 1.0\nGenerator: meson\nRoot-Is-Purelib: false\nTag: cp313-cp313-win_amd64
.venv\Lib\site-packages\numpy-2.3.1.dist-info\WHEEL
WHEEL
Other
85
0.5
0
0
node-utils
812
2024-06-26T07:59:43.043876
GPL-3.0
false
51337c97620c3b1e0d781ad8efe86cea
from abc import ABCMeta


class EnforceOverridesMeta(ABCMeta):
    """Metaclass that checks, at class-creation time, that any method shadowing
    a base-class method carries the ``__override__`` marker and that no method
    tagged ``__final__`` (by the ``@final`` decorator) is overridden."""

    def __new__(mcls, name, bases, namespace, **kwargs):
        # Tag every non-dunder attribute of the metaclass itself as ignored so
        # it is never mistaken for an overridable method of a base class.
        for attr_name in dir(mcls):
            if attr_name.startswith("__") or attr_name == "mro":
                continue
            member = getattr(mcls, attr_name)
            if isinstance(member, (bool, str, int, float, tuple, list, dict)):
                continue
            setattr(getattr(mcls, attr_name), "__ignored__", True)

        cls = super().__new__(mcls, name, bases, namespace, **kwargs)
        for attr_name, member in namespace.items():
            mcls._check_if_overrides_final_method(attr_name, bases)
            if attr_name.startswith("__"):
                continue
            unwrapped = mcls._handle_special_value(member)
            mcls._check_if_overrides_without_overrides_decorator(
                attr_name, unwrapped, bases
            )
        return cls

    @staticmethod
    def _check_if_overrides_without_overrides_decorator(name, value, bases):
        # A member that shadows a callable from any base must be tagged with
        # __override__ (set by the @override / @overrides decorators).
        is_override = getattr(value, "__override__", False)
        for base in bases:
            inherited = getattr(base, name, False)
            if (
                not inherited
                or not callable(inherited)
                or getattr(inherited, "__ignored__", False)
            ):
                continue
            if not is_override:
                raise TypeError(
                    f"Method {name} overrides method from {base} but does not have @override decorator"
                )

    @staticmethod
    def _check_if_overrides_final_method(name, bases):
        for base in bases:
            inherited = getattr(base, name, False)
            # `__final__` is added by `@final` decorator
            if getattr(inherited, "__final__", False):
                raise TypeError(
                    f"Method {name} is finalized in {base}, it cannot be overridden"
                )

    @staticmethod
    def _handle_special_value(value):
        # Unwrap descriptors so the marker attributes live on the underlying
        # function object.
        if isinstance(value, (classmethod, staticmethod)):
            value = value.__get__(None, dict)
        elif isinstance(value, property):
            value = value.fget
        return value


class EnforceOverrides(metaclass=EnforceOverridesMeta):
    "Use this as the parent class for your custom classes"
    pass
.venv\Lib\site-packages\overrides\enforce.py
enforce.py
Python
2,349
0.95
0.327586
0.04
node-utils
986
2024-10-21T17:08:26.892769
BSD-3-Clause
false
a29117c7bfdec804e5d411b7d09225e6
#
# Copyright 2016 Keunhong Lee
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from types import FunctionType
from typing import Callable, TypeVar, Union

_WrappedMethod = TypeVar("_WrappedMethod", bound=Union[FunctionType, Callable])


def final(method: _WrappedMethod) -> _WrappedMethod:
    """Decorator marking *method* as finalized so it may not be overridden.

    Runs once while the class body is being evaluated, so runtime cost is
    negligible.  Only methods decorated with ``@override`` (or classes using
    ``EnforceOverrides``) are actually checked against this marker.

    How to use:
        from overrides import final

        class SuperClass(object):
            @final
            def method(self):
                return 2

        class SubClass(SuperClass):
            @override
            def method(self):  # causes an error
                return 1

    :raises AssertionError: if there exists a match in sub classes for the method name
    :return: the same *method*, now carrying a ``__final__`` attribute
    """
    # The attribute is the only state the enforcement machinery inspects.
    method.__final__ = True
    return method
.venv\Lib\site-packages\overrides\final.py
final.py
Python
1,511
0.95
0.222222
0.394737
python-kit
322
2024-09-12T10:07:13.873806
GPL-3.0
false
7c4ebebd2ef84089b8d2104a39df7537
#
# Copyright 2019 Mikko Korpela
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import dis
import functools
import inspect
import sys
from types import FrameType, FunctionType
from typing import Callable, List, Optional, Tuple, TypeVar, Union, overload

__VERSION__ = "7.7.0"

from overrides.signature import ensure_signature_is_compatible

_WrappedMethod = TypeVar("_WrappedMethod", bound=Union[FunctionType, Callable])
_DecoratorMethod = Callable[[_WrappedMethod], _WrappedMethod]


@overload
def overrides(
    method: None = None,
    *,
    check_signature: bool = True,
    check_at_runtime: bool = False,
) -> _DecoratorMethod:
    ...


@overload
def overrides(
    method: _WrappedMethod,
    *,
    check_signature: bool = True,
    check_at_runtime: bool = False,
) -> _WrappedMethod:
    ...


def overrides(
    method: Optional[_WrappedMethod] = None,
    *,
    check_signature: bool = True,
    check_at_runtime: bool = False,
) -> Union[_DecoratorMethod, _WrappedMethod]:
    """Decorator to indicate that the decorated method overrides a method in
    superclass.
    The decorator code is executed while loading class. Using this method
    should have minimal runtime performance implications.

    How to use:
    from overrides import overrides

    class SuperClass(object):
        def method(self):
            return 2

    class SubClass(SuperClass):

        @overrides
        def method(self):
            return 1

    :param check_signature: Whether or not to check the signature of the overridden method.
    :param check_at_runtime: Whether or not to check the overridden method at runtime.
    :raises AssertionError: if no match in super classes for the method name
    :return: method with possibly added (if the method doesn't have one)
        docstring from super class
    """
    if method is not None:
        return _overrides(method, check_signature, check_at_runtime)
    else:
        # Parenthesized use, e.g. @overrides(check_signature=False): defer to
        # a second invocation.  functools.partial adds no Python stack frame,
        # so the sys._getframe(3) depth inside _overrides is unchanged.
        return functools.partial(
            overrides,
            check_signature=check_signature,
            check_at_runtime=check_at_runtime,
        )


@overload
def override(
    method: None = None,
    *,
    check_signature: bool = True,
    check_at_runtime: bool = False,
) -> _DecoratorMethod:
    ...


@overload
def override(
    method: _WrappedMethod,
    *,
    check_signature: bool = True,
    check_at_runtime: bool = False,
) -> _WrappedMethod:
    ...


def override(
    method: Optional[_WrappedMethod] = None,
    *,
    check_signature: bool = True,
    check_at_runtime: bool = False,
) -> Union[_DecoratorMethod, _WrappedMethod]:
    """Decorator to indicate that the decorated method overrides a method in
    superclass.
    The decorator code is executed while loading class. Using this method
    should have minimal runtime performance implications.

    How to use:
    from overrides import override

    class SuperClass(object):
        def method(self):
            return 2

    class SubClass(SuperClass):

        @override
        def method(self):
            return 1

    :param check_signature: Whether or not to check the signature of the overridden method.
    :param check_at_runtime: Whether or not to check the overridden method at runtime.
    :raises AssertionError: if no match in super classes for the method name
    :return: method with possibly added (if the method doesn't have one)
        docstring from super class
    """
    if method is not None:
        return _overrides(method, check_signature, check_at_runtime)
    else:
        # Fix: delegate back to `override` itself.  The previous delegation to
        # the sibling `overrides` was behaviorally identical (both forward to
        # _overrides at the same stack depth) but inconsistent and misleading.
        return functools.partial(
            override,
            check_signature=check_signature,
            check_at_runtime=check_at_runtime,
        )


def _overrides(
    method: _WrappedMethod,
    check_signature: bool,
    check_at_runtime: bool,
) -> _WrappedMethod:
    """Tag *method* with ``__override__`` and validate it against the first
    base class (of the class currently being defined) that declares it.

    NOTE(review): sys._getframe(3) below encodes a fixed call depth from the
    frame executing the enclosing ``class`` statement — do not add or remove
    intermediate Python-level calls on the decorator path.
    """
    setattr(method, "__override__", True)
    global_vars = getattr(method, "__globals__", None)
    if global_vars is None:
        # C-implemented callables lack __globals__; fall back to the globals
        # of the module the method reports it was defined in.
        global_vars = vars(sys.modules[method.__module__])
    for super_class in _get_base_classes(sys._getframe(3), global_vars):
        if hasattr(super_class, method.__name__):
            if check_at_runtime:

                @functools.wraps(method)
                def wrapper(*args, **kwargs):
                    # Validation deferred to first call instead of class load.
                    _validate_method(method, super_class, check_signature)
                    return method(*args, **kwargs)

                return wrapper  # type: ignore
            else:
                _validate_method(method, super_class, check_signature)
                return method
    raise TypeError(f"{method.__qualname__}: No super class method found")


def _validate_method(method, super_class, check_signature):
    """Check finality and (optionally) signature compatibility of *method*
    against the identically named attribute of *super_class*; also inherit
    the super method's docstring when *method* has none."""
    super_method = getattr(super_class, method.__name__)
    is_static = isinstance(
        inspect.getattr_static(super_class, method.__name__), staticmethod
    )
    if getattr(super_method, "__final__", False):
        raise TypeError(f"{method.__name__}: is finalized in {super_class}")
    if not method.__doc__:
        method.__doc__ = super_method.__doc__
    if (
        check_signature
        and not method.__name__.startswith("__")
        and not isinstance(super_method, property)
    ):
        ensure_signature_is_compatible(super_method, method, is_static)


def _get_base_classes(frame, namespace):
    """Resolve the base-class objects referenced by the class statement that
    is currently executing in *frame*."""
    return [
        _get_base_class(class_name_components, namespace)
        for class_name_components in _get_base_class_names(frame)
    ]


def _get_base_class_names(frame: FrameType) -> List[List[str]]:
    """Get baseclass names from the code object"""
    # Walk the bytecode of the frame up to the instruction currently being
    # executed (f_lasti) and collect the dotted-name loads that supply the
    # base-class list of the in-progress ``class`` statement.
    current_item: List[str] = []
    items: List[List[str]] = []
    add_last_step = True

    for instruction in dis.get_instructions(frame.f_code):
        if instruction.offset > frame.f_lasti:
            break
        if instruction.opcode not in dis.hasname:
            continue
        if not add_last_step:
            items = []
            add_last_step = True

        # Combine LOAD_NAME and LOAD_GLOBAL as they have similar functionality
        if instruction.opname in ["LOAD_NAME", "LOAD_GLOBAL"]:
            if current_item:
                items.append(current_item)
            current_item = [instruction.argval]

        elif instruction.opname == "LOAD_ATTR" and current_item:
            current_item.append(instruction.argval)

        # Reset on other instructions
        else:
            if current_item:
                items.append(current_item)
                current_item = []
            add_last_step = False

    if current_item:
        items.append(current_item)
    return items


def _get_base_class(components, namespace):
    """Resolve a dotted name (given as a list of components) first in
    *namespace*, then in its builtins."""
    try:
        obj = namespace[components[0]]
    except KeyError:
        # __builtins__ is a dict in the main module but a module elsewhere.
        if isinstance(namespace["__builtins__"], dict):
            obj = namespace["__builtins__"][components[0]]
        else:
            obj = getattr(namespace["__builtins__"], components[0])
    for component in components[1:]:
        if hasattr(obj, component):
            obj = getattr(obj, component)
    return obj
.venv\Lib\site-packages\overrides\overrides.py
overrides.py
Python
7,505
0.95
0.225806
0.112195
vue-tools
719
2024-06-10T04:48:21.032779
Apache-2.0
false
dd30f38536df98dc218255b4ce77e916
import inspect\nfrom inspect import Parameter\nfrom types import FunctionType\nfrom typing import Callable, Dict, Optional, Tuple, Type, TypeVar, Union, get_type_hints\n\nfrom .typing_utils import get_args, issubtype\n\n_WrappedMethod = TypeVar("_WrappedMethod", bound=Union[FunctionType, Callable])\n_WrappedMethod2 = TypeVar("_WrappedMethod2", bound=Union[FunctionType, Callable])\n\n\ndef _contains_unbound_typevar(t: Type) -> bool:\n """Recursively check if `t` or any types contained by `t` is a `TypeVar`.\n\n Examples where we return `True`: `T`, `Optional[T]`, `Tuple[Optional[T], ...]`, ...\n Examples where we return `False`: `int`, `Optional[str]`, ...\n\n :param t: Type to evaluate.\n :return: `True` if the input type contains an unbound `TypeVar`, `False` otherwise.\n """\n\n # Check self\n if isinstance(t, TypeVar):\n return True\n\n # Check children\n for arg in get_args(t):\n if _contains_unbound_typevar(arg):\n return True\n\n return False\n\n\ndef _issubtype(left, right):\n if _contains_unbound_typevar(left):\n return True\n if right is None:\n return True\n if _contains_unbound_typevar(right):\n return True\n try:\n return issubtype(left, right)\n except TypeError:\n # Ignore all broken cases\n return True\n\n\ndef _get_type_hints(callable) -> Optional[Dict]:\n try:\n return get_type_hints(callable)\n except (NameError, TypeError):\n return None\n\n\ndef _is_same_module(callable1: _WrappedMethod, callable2: _WrappedMethod2) -> bool:\n mod1 = callable1.__module__.split(".")[0]\n # "__module__" attribute may be missing in CPython or it can be None\n # in PyPy: https://github.com/mkorpela/overrides/issues/118\n mod2 = getattr(callable2, "__module__", None)\n if mod2 is None:\n return False\n mod2 = mod2.split(".")[0]\n return mod1 == mod2\n\n\ndef ensure_signature_is_compatible(\n super_callable: _WrappedMethod,\n sub_callable: _WrappedMethod2,\n is_static: bool = False,\n) -> None:\n """Ensure that the signature of `sub_callable` is compatible with the 
signature of `super_callable`.\n\n Guarantees that any call to `super_callable` will work on `sub_callable` by checking the following criteria:\n\n 1. The return type of `sub_callable` is a subtype of the return type of `super_callable`.\n 2. All parameters of `super_callable` are present in `sub_callable`, unless `sub_callable`\n declares `*args` or `**kwargs`.\n 3. All positional parameters of `super_callable` appear in the same order in `sub_callable`.\n 4. All parameters of `super_callable` are a subtype of the corresponding parameters of `sub_callable`.\n 5. All required parameters of `sub_callable` are present in `super_callable`, unless `super_callable`\n declares `*args` or `**kwargs`.\n\n :param super_callable: Function to check compatibility with.\n :param sub_callable: Function to check compatibility of.\n :param is_static: True if staticmethod and should check first argument.\n """\n super_callable = _unbound_func(super_callable)\n sub_callable = _unbound_func(sub_callable)\n\n try:\n super_sig = inspect.signature(super_callable)\n except ValueError:\n return\n\n super_type_hints = _get_type_hints(super_callable)\n sub_sig = inspect.signature(sub_callable)\n sub_type_hints = _get_type_hints(sub_callable)\n\n method_name = sub_callable.__qualname__\n same_main_module = _is_same_module(sub_callable, super_callable)\n\n if super_type_hints is not None and sub_type_hints is not None:\n ensure_return_type_compatibility(super_type_hints, sub_type_hints, method_name)\n ensure_all_kwargs_defined_in_sub(\n super_sig, sub_sig, super_type_hints, sub_type_hints, is_static, method_name\n )\n ensure_all_positional_args_defined_in_sub(\n super_sig,\n sub_sig,\n super_type_hints,\n sub_type_hints,\n is_static,\n same_main_module,\n method_name,\n )\n ensure_no_extra_args_in_sub(super_sig, sub_sig, is_static, method_name)\n\n\ndef _unbound_func(callable: _WrappedMethod) -> _WrappedMethod:\n if hasattr(callable, "__self__") and hasattr(callable, "__func__"):\n return 
callable.__func__ # type: ignore\n return callable\n\n\ndef ensure_all_kwargs_defined_in_sub(\n super_sig: inspect.Signature,\n sub_sig: inspect.Signature,\n super_type_hints: Dict,\n sub_type_hints: Dict,\n check_first_parameter: bool,\n method_name: str,\n):\n sub_has_var_kwargs = any(\n p.kind == Parameter.VAR_KEYWORD for p in sub_sig.parameters.values()\n )\n for super_index, (name, super_param) in enumerate(super_sig.parameters.items()):\n if super_index == 0 and not check_first_parameter:\n continue\n if super_param.kind == Parameter.VAR_POSITIONAL:\n continue\n if super_param.kind == Parameter.POSITIONAL_ONLY:\n continue\n if not is_param_defined_in_sub(\n name, True, sub_has_var_kwargs, sub_sig, super_param\n ):\n raise TypeError(f"{method_name}: `{name}` is not present.")\n elif name in sub_sig.parameters and super_param.kind != Parameter.VAR_KEYWORD:\n sub_index = list(sub_sig.parameters.keys()).index(name)\n sub_param = sub_sig.parameters[name]\n\n if super_param.kind != sub_param.kind and not (\n super_param.kind == Parameter.KEYWORD_ONLY\n and sub_param.kind == Parameter.POSITIONAL_OR_KEYWORD\n ):\n raise TypeError(f"{method_name}: `{name}` is not `{super_param.kind}`")\n elif super_index > sub_index and super_param.kind != Parameter.KEYWORD_ONLY:\n raise TypeError(\n f"{method_name}: `{name}` is not parameter at index `{super_index}`"\n )\n elif (\n name in super_type_hints\n and name in sub_type_hints\n and not _issubtype(super_type_hints[name], sub_type_hints[name])\n ):\n raise TypeError(\n f"`{method_name}: {name} must be a supertype of `{super_param.annotation}` but is `{sub_param.annotation}`"\n )\n\n\ndef ensure_all_positional_args_defined_in_sub(\n super_sig: inspect.Signature,\n sub_sig: inspect.Signature,\n super_type_hints: Dict,\n sub_type_hints: Dict,\n check_first_parameter: bool,\n is_same_main_module: bool,\n method_name: str,\n):\n sub_parameter_values = [\n v\n for v in sub_sig.parameters.values()\n if v.kind not in 
(Parameter.KEYWORD_ONLY, Parameter.VAR_KEYWORD)\n ]\n super_parameter_values = [\n v\n for v in super_sig.parameters.values()\n if v.kind not in (Parameter.KEYWORD_ONLY, Parameter.VAR_KEYWORD)\n ]\n sub_has_var_args = any(\n p.kind == Parameter.VAR_POSITIONAL for p in sub_parameter_values\n )\n super_has_var_args = any(\n p.kind == Parameter.VAR_POSITIONAL for p in super_parameter_values\n )\n if not sub_has_var_args and len(sub_parameter_values) < len(super_parameter_values):\n raise TypeError(f"{method_name}: parameter list too short")\n super_shift = 0\n for index, sub_param in enumerate(sub_parameter_values):\n if index == 0 and not check_first_parameter:\n continue\n if index + super_shift >= len(super_parameter_values):\n if sub_param.kind == Parameter.VAR_POSITIONAL:\n continue\n if (\n sub_param.kind == Parameter.POSITIONAL_ONLY\n and sub_param.default != Parameter.empty\n ):\n continue\n if sub_param.kind == Parameter.POSITIONAL_OR_KEYWORD:\n continue # Assume use as keyword\n raise TypeError(\n f"{method_name}: `{sub_param.name}` positionally required in subclass but not in supertype"\n )\n if sub_param.kind == Parameter.VAR_POSITIONAL:\n return\n super_param = super_parameter_values[index + super_shift]\n if super_param.kind == Parameter.VAR_POSITIONAL:\n super_shift -= 1\n if super_param.kind == Parameter.VAR_POSITIONAL:\n if not sub_has_var_args:\n raise TypeError(f"{method_name}: `{super_param.name}` must be present")\n continue\n if (\n super_param.kind != sub_param.kind\n and not (\n super_param.kind == Parameter.POSITIONAL_ONLY\n and sub_param.kind == Parameter.POSITIONAL_OR_KEYWORD\n )\n and not (sub_param.kind == Parameter.POSITIONAL_ONLY and super_has_var_args)\n ):\n raise TypeError(\n f"{method_name}: `{sub_param.name}` is not `{super_param.kind}` and is `{sub_param.kind}`"\n )\n elif (\n super_param.name in super_type_hints or is_same_main_module\n ) and not _issubtype(\n super_type_hints.get(super_param.name, None),\n 
sub_type_hints.get(sub_param.name, None),\n ):\n raise TypeError(\n f"`{method_name}: {sub_param.name} overriding must be a supertype of `{super_param.annotation}` but is `{sub_param.annotation}`"\n )\n\n\ndef is_param_defined_in_sub(\n name: str,\n sub_has_var_args: bool,\n sub_has_var_kwargs: bool,\n sub_sig: inspect.Signature,\n super_param: inspect.Parameter,\n) -> bool:\n return (\n name in sub_sig.parameters\n or (super_param.kind == Parameter.VAR_POSITIONAL and sub_has_var_args)\n or (super_param.kind == Parameter.VAR_KEYWORD and sub_has_var_kwargs)\n or (super_param.kind == Parameter.POSITIONAL_ONLY and sub_has_var_args)\n or (\n super_param.kind == Parameter.POSITIONAL_OR_KEYWORD\n and sub_has_var_args\n and sub_has_var_kwargs\n )\n or (super_param.kind == Parameter.KEYWORD_ONLY and sub_has_var_kwargs)\n )\n\n\ndef ensure_no_extra_args_in_sub(\n super_sig: inspect.Signature,\n sub_sig: inspect.Signature,\n check_first_parameter: bool,\n method_name: str,\n) -> None:\n super_params = super_sig.parameters.values()\n super_var_args = any(p.kind == Parameter.VAR_POSITIONAL for p in super_params)\n super_var_kwargs = any(p.kind == Parameter.VAR_KEYWORD for p in super_params)\n for sub_index, (name, sub_param) in enumerate(sub_sig.parameters.items()):\n if (\n sub_param.kind == Parameter.POSITIONAL_ONLY\n and len(super_params) > sub_index\n and list(super_params)[sub_index].kind == Parameter.POSITIONAL_ONLY\n ):\n continue\n if (\n name not in super_sig.parameters\n and sub_param.default == Parameter.empty\n and sub_param.kind != Parameter.VAR_POSITIONAL\n and sub_param.kind != Parameter.VAR_KEYWORD\n and not (sub_param.kind == Parameter.KEYWORD_ONLY and super_var_kwargs)\n and not (sub_param.kind == Parameter.POSITIONAL_ONLY and super_var_args)\n and not (\n sub_param.kind == Parameter.POSITIONAL_OR_KEYWORD and super_var_args\n )\n and (sub_index > 0 or check_first_parameter)\n ):\n raise TypeError(f"{method_name}: `{name}` is not a valid parameter.")\n\n\ndef 
ensure_return_type_compatibility(\n super_type_hints: Dict, sub_type_hints: Dict, method_name: str\n):\n super_return = super_type_hints.get("return", None)\n sub_return = sub_type_hints.get("return", None)\n if not _issubtype(sub_return, super_return) and super_return is not None:\n raise TypeError(\n f"{method_name}: return type `{sub_return}` is not a `{super_return}`."\n )\n
.venv\Lib\site-packages\overrides\signature.py
signature.py
Python
11,785
0.95
0.186885
0.018657
awesome-app
727
2023-10-12T09:34:36.342424
Apache-2.0
false
8645cddcecd96521d5f5a38e3acdf50b
"""\nBackport Python3.8+ typing utils &amp; issubtype &amp; more\n\n![Python 3.6](https://github.com/bojiang/typing_utils/workflows/Python%203.6/badge.svg)\n![Python 3.7](https://github.com/bojiang/typing_utils/workflows/Python%203.7/badge.svg)\n![Python 3.8](https://github.com/bojiang/typing_utils/workflows/Python%203.8/badge.svg)\n\n## Install\n\n``` bash\n pip install typing_utils\n```\n"""\n\nimport collections.abc\nimport io\nimport itertools\nimport types\nimport typing\n\nif hasattr(typing, "ForwardRef"): # python3.8\n ForwardRef = getattr(typing, "ForwardRef")\nelif hasattr(typing, "_ForwardRef"): # python3.6\n ForwardRef = getattr(typing, "_ForwardRef")\nelse:\n raise NotImplementedError()\n\nif hasattr(typing, "Literal"):\n Literal = getattr(typing, "Literal")\nelse:\n Literal = None\n\nif hasattr(typing, "_TypedDictMeta"):\n _TypedDictMeta = getattr(typing, "_TypedDictMeta")\nelse:\n _TypedDictMeta = None\n\nif hasattr(types, "UnionType"):\n UnionType = getattr(types, "UnionType")\nelse:\n UnionType = None\n\nunknown = None\n\nBUILTINS_MAPPING = {\n typing.List: list,\n typing.Set: set,\n typing.Dict: dict,\n typing.Tuple: tuple,\n typing.ByteString: bytes, # https://docs.python.org/3/library/typing.html#typing.ByteString\n typing.Callable: collections.abc.Callable,\n typing.Sequence: collections.abc.Sequence,\n type(None): None,\n}\n\nSTATIC_SUBTYPE_MAPPING: typing.Dict[type, typing.Type] = {\n io.TextIOWrapper: typing.TextIO,\n io.TextIOBase: typing.TextIO,\n io.StringIO: typing.TextIO,\n io.BufferedReader: typing.BinaryIO,\n io.BufferedWriter: typing.BinaryIO,\n io.BytesIO: typing.BinaryIO,\n}\n\nif UnionType:\n\n def is_union(element: object) -> bool:\n return element is typing.Union or element is UnionType\n\nelse:\n\n def is_union(element: object) -> bool:\n return element is typing.Union\n\n\ndef optional_all(elements) -> typing.Optional[bool]:\n if all(elements):\n return True\n if all(e is False for e in elements):\n return False\n return 
unknown\n\n\ndef optional_any(elements) -> typing.Optional[bool]:\n if any(elements):\n return True\n if any(e is None for e in elements):\n return unknown\n return False\n\n\ndef _hashable(value):\n """Determine whether `value` can be hashed."""\n try:\n hash(value)\n except TypeError:\n return False\n return True\n\n\nget_type_hints = typing.get_type_hints\n\nGenericClass = type(typing.List)\nUnionClass = type(typing.Union)\n\nType = typing.Union[None, type, "typing.TypeVar"]\nOriginType = typing.Union[None, type]\nTypeArgs = typing.Union[type, typing.AbstractSet[type], typing.Sequence[type]]\n\n\ndef _normalize_aliases(type_: Type) -> Type:\n if isinstance(type_, typing.TypeVar):\n return type_\n\n assert _hashable(type_), "_normalize_aliases should only be called on element types"\n\n if type_ in BUILTINS_MAPPING:\n return BUILTINS_MAPPING[type_] # type: ignore\n return type_\n\n\ndef get_origin(type_):\n """Get the unsubscripted version of a type.\n This supports generic types, Callable, Tuple, Union, Literal, Final and ClassVar.\n Return None for unsupported types.\n\n Examples:\n\n ```python\n from typing_utils import get_origin\n\n get_origin(Literal[42]) is Literal\n get_origin(int) is None\n get_origin(ClassVar[int]) is ClassVar\n get_origin(Generic) is Generic\n get_origin(Generic[T]) is Generic\n get_origin(Union[T, int]) is Union\n get_origin(List[Tuple[T, T]][int]) == list\n ```\n """\n if hasattr(typing, "get_origin"): # python 3.8+\n _getter = getattr(typing, "get_origin")\n ori = _getter(type_)\n elif hasattr(typing.List, "_special"): # python 3.7\n if isinstance(type_, GenericClass) and not type_._special:\n ori = type_.__origin__\n elif hasattr(type_, "_special") and type_._special:\n ori = type_\n elif type_ is typing.Generic:\n ori = typing.Generic\n else:\n ori = None\n else: # python 3.6\n if isinstance(type_, GenericClass):\n ori = type_.__origin__\n if ori is None:\n ori = type_\n elif isinstance(type_, UnionClass):\n ori = 
type_.__origin__\n elif type_ is typing.Generic:\n ori = typing.Generic\n else:\n ori = None\n if ori is None and _TypedDictMeta and isinstance(type_, _TypedDictMeta):\n ori = dict\n return _normalize_aliases(ori)\n\n\ndef get_args(type_) -> typing.Tuple:\n """Get type arguments with all substitutions performed.\n For unions, basic simplifications used by Union constructor are performed.\n\n Examples:\n\n ```python\n from typing_utils import get_args\n\n get_args(Dict[str, int]) == (str, int)\n get_args(int) == ()\n get_args(Union[int, Union[T, int], str][int]) == (int, str)\n get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])\n get_args(Callable[[], T][int]) == ([], int)\n ```\n """\n if hasattr(typing, "get_args"): # python 3.8+\n _getter = getattr(typing, "get_args")\n res = _getter(type_)\n elif hasattr(typing.List, "_special"): # python 3.7\n if (\n isinstance(type_, GenericClass) and not type_._special # type: ignore\n ): # backport for python 3.8\n res = type_.__args__ # type: ignore\n if get_origin(type_) is collections.abc.Callable and res[0] is not Ellipsis:\n res = (list(res[:-1]), res[-1])\n else:\n res = ()\n else: # python 3.6\n if isinstance(type_, (GenericClass, UnionClass)): # backport for python 3.8\n res = type_.__args__ # type: ignore\n if get_origin(type_) is collections.abc.Callable and res[0] is not Ellipsis:\n res = (list(res[:-1]), res[-1])\n else:\n res = ()\n if _TypedDictMeta and isinstance(type_, _TypedDictMeta):\n return str, typing.Any\n return () if res is None else res\n\n\ndef eval_forward_ref(ref, forward_refs=None):\n """\n eval forward_refs in all cPython versions\n """\n localns = forward_refs or {}\n\n if hasattr(typing, "_eval_type"): # python3.8 & python 3.9\n _eval_type = getattr(typing, "_eval_type")\n return _eval_type(ref, globals(), localns)\n\n if hasattr(ref, "_eval_type"): # python3.6\n _eval_type = getattr(ref, "_eval_type")\n return _eval_type(globals(), localns)\n\n raise 
NotImplementedError()\n\n\nclass NormalizedType(typing.NamedTuple):\n """\n Normalized type, made it possible to compare, hash between types.\n """\n\n origin: Type\n args: typing.Union[tuple, frozenset] = tuple()\n\n def __eq__(self, other):\n if isinstance(other, NormalizedType):\n if self.origin != other.origin:\n return False\n if isinstance(self.args, frozenset) and isinstance(other.args, frozenset):\n return self.args <= other.args and other.args <= self.args\n return self.origin == other.origin and self.args == other.args\n if not self.args:\n return self.origin == other\n return False\n\n def __hash__(self) -> int:\n if not self.args:\n return hash(self.origin)\n return hash((self.origin, self.args))\n\n def __repr__(self):\n if not self.args:\n return f"{self.origin}"\n return f"{self.origin}[{self.args}])"\n\n\ndef _normalize_args(tps: TypeArgs):\n if isinstance(tps, str):\n return tps\n if isinstance(tps, collections.abc.Sequence):\n return tuple(_normalize_args(type_) for type_ in tps)\n if isinstance(tps, collections.abc.Set):\n return frozenset(_normalize_args(type_) for type_ in tps)\n return normalize(tps)\n\n\ndef normalize(type_: Type) -> NormalizedType:\n """\n convert types to NormalizedType instances.\n """\n args = get_args(type_)\n origin = get_origin(type_)\n if not origin:\n return NormalizedType(_normalize_aliases(type_))\n origin = _normalize_aliases(origin)\n\n if is_union(origin): # sort args when the origin is Union\n args = _normalize_args(frozenset(args))\n else:\n args = _normalize_args(args)\n return NormalizedType(origin, args)\n\n\ndef _is_origin_subtype(left: OriginType, right: OriginType) -> bool:\n if left is right:\n return True\n\n if (\n left is not None\n and left in STATIC_SUBTYPE_MAPPING\n and right == STATIC_SUBTYPE_MAPPING[left]\n ):\n return True\n\n if hasattr(left, "mro"):\n for parent in left.mro(): # type: ignore\n if parent == right:\n return True\n\n if isinstance(left, type) and isinstance(right, type):\n 
return issubclass(left, right)\n\n return left == right\n\n\nNormalizedTypeArgs = typing.Union[\n typing.Tuple[typing.Any, ...],\n typing.FrozenSet[NormalizedType],\n NormalizedType,\n]\n\n\ndef _is_origin_subtype_args(\n left: "NormalizedTypeArgs",\n right: "NormalizedTypeArgs",\n forward_refs: typing.Optional[typing.Mapping[str, type]],\n) -> typing.Optional[bool]:\n if isinstance(left, frozenset):\n if not isinstance(right, frozenset):\n return False\n\n excluded = left - right\n if not excluded:\n # Union[str, int] <> Union[int, str]\n return True\n\n # Union[list, int] <> Union[typing.Sequence, int]\n return all(\n any(_is_normal_subtype(e, r, forward_refs) for r in right) for e in excluded\n )\n\n if isinstance(left, collections.abc.Sequence) and not isinstance(\n left, NormalizedType\n ):\n if not isinstance(right, collections.abc.Sequence) or isinstance(\n right, NormalizedType\n ):\n return False\n\n if (\n left\n and left[-1].origin is not Ellipsis\n and right\n and right[-1].origin is Ellipsis\n ):\n # Tuple[type, type] <> Tuple[type, ...]\n return all(_is_origin_subtype_args(l, right[0], forward_refs) for l in left)\n\n if len(left) != len(right):\n return False\n\n return all(\n l is not None\n and r is not None\n and _is_origin_subtype_args(l, r, forward_refs)\n for l, r in itertools.zip_longest(left, right)\n )\n\n assert isinstance(left, NormalizedType)\n assert isinstance(right, NormalizedType)\n\n return _is_normal_subtype(left, right, forward_refs)\n\n\ndef _is_normal_subtype(\n left: NormalizedType,\n right: NormalizedType,\n forward_refs: typing.Optional[typing.Mapping[str, type]],\n) -> typing.Optional[bool]:\n if isinstance(left.origin, ForwardRef):\n left = normalize(eval_forward_ref(left.origin, forward_refs=forward_refs))\n\n if isinstance(right.origin, ForwardRef):\n right = normalize(eval_forward_ref(right.origin, forward_refs=forward_refs))\n\n # Any\n if right.origin is typing.Any:\n return True\n\n # Union\n if is_union(right.origin) 
and is_union(left.origin):\n return _is_origin_subtype_args(left.args, right.args, forward_refs)\n if is_union(right.origin):\n return optional_any(\n _is_normal_subtype(left, a, forward_refs) for a in right.args\n )\n if is_union(left.origin):\n return optional_all(\n _is_normal_subtype(a, right, forward_refs) for a in left.args\n )\n\n # Literal\n if right.origin is Literal:\n if left.origin is not Literal:\n return False\n return set(left.args).issubset(set(right.args))\n\n # TypeVar\n if isinstance(left.origin, typing.TypeVar) and isinstance(\n right.origin, typing.TypeVar\n ):\n if left.origin is right.origin:\n return True\n\n left_bound = getattr(left.origin, "__bound__", None)\n right_bound = getattr(right.origin, "__bound__", None)\n if right_bound is None or left_bound is None:\n return unknown\n return _is_normal_subtype(\n normalize(left_bound), normalize(right_bound), forward_refs\n )\n if isinstance(right.origin, typing.TypeVar):\n return unknown\n if isinstance(left.origin, typing.TypeVar):\n left_bound = getattr(left.origin, "__bound__", None)\n if left_bound is None:\n return unknown\n return _is_normal_subtype(normalize(left_bound), right, forward_refs)\n\n if not left.args and not right.args:\n return _is_origin_subtype(left.origin, right.origin)\n\n if not right.args:\n return _is_origin_subtype(left.origin, right.origin)\n\n if _is_origin_subtype(left.origin, right.origin):\n return _is_origin_subtype_args(left.args, right.args, forward_refs)\n\n return False\n\n\ndef issubtype(\n left: Type,\n right: Type,\n forward_refs: typing.Optional[dict] = None,\n) -> typing.Optional[bool]:\n """Check that the left argument is a subtype of the right.\n For unions, check if the type arguments of the left is a subset of the right.\n Also works for nested types including ForwardRefs.\n\n Examples:\n\n ```python\n from typing_utils import issubtype\n\n issubtype(typing.List, typing.Any) == True\n issubtype(list, list) == True\n issubtype(list, typing.List) 
== True\n issubtype(list, typing.Sequence) == True\n issubtype(typing.List[int], list) == True\n issubtype(typing.List[typing.List], list) == True\n issubtype(list, typing.List[int]) == False\n issubtype(list, typing.Union[typing.Tuple, typing.Set]) == False\n issubtype(typing.List[typing.List], typing.List[typing.Sequence]) == True\n JSON = typing.Union[\n int, float, bool, str, None, typing.Sequence["JSON"],\n typing.Mapping[str, "JSON"]\n ]\n issubtype(str, JSON, forward_refs={'JSON': JSON}) == True\n issubtype(typing.Dict[str, str], JSON, forward_refs={'JSON': JSON}) == True\n issubtype(typing.Dict[str, bytes], JSON, forward_refs={'JSON': JSON}) == False\n ```\n """\n return _is_normal_subtype(normalize(left), normalize(right), forward_refs)\n\n\n__all__ = [\n "issubtype",\n "get_origin",\n "get_args",\n "get_type_hints",\n]\n
.venv\Lib\site-packages\overrides\typing_utils.py
typing_utils.py
Python
14,240
0.95
0.214437
0.021108
vue-tools
903
2025-01-30T00:09:51.508055
GPL-3.0
false
cdc90c37a3ab8809705f619b4ed148ab
from overrides.enforce import EnforceOverrides\nimport sys\n\nif sys.version_info < (3, 11):\n from overrides.final import final\nelse:\n from typing import final\nfrom overrides.overrides import __VERSION__, overrides, override\n\n\n__all__ = [\n "__VERSION__",\n "override",\n "overrides",\n "final",\n "EnforceOverrides",\n]\n
.venv\Lib\site-packages\overrides\__init__.py
__init__.py
Python
333
0.85
0.058824
0
react-lib
714
2024-04-30T18:30:16.459112
BSD-3-Clause
false
ac350d727609bee5e6063b520137dee8
\n\n
.venv\Lib\site-packages\overrides\__pycache__\enforce.cpython-313.pyc
enforce.cpython-313.pyc
Other
3,544
0.95
0.111111
0
react-lib
750
2024-06-26T08:46:17.122753
MIT
false
b1fa5eeaf6a07dc2b78ce3d3799bde5a
\n\n
.venv\Lib\site-packages\overrides\__pycache__\final.cpython-313.pyc
final.cpython-313.pyc
Other
1,171
0.95
0.363636
0
node-utils
702
2023-11-02T11:13:41.831206
BSD-3-Clause
false
211f7e20abd97f8c6090209ce55f3893
\n\n
.venv\Lib\site-packages\overrides\__pycache__\overrides.cpython-313.pyc
overrides.cpython-313.pyc
Other
7,991
0.95
0.166667
0
python-kit
687
2024-01-26T22:21:51.166491
MIT
false
dd6065caf646fb9c8ce4ec53ea4b01c5
\n\n
.venv\Lib\site-packages\overrides\__pycache__\signature.cpython-313.pyc
signature.cpython-313.pyc
Other
15,088
0.95
0.021127
0.023256
node-utils
518
2024-12-17T12:50:13.304790
BSD-3-Clause
false
f06b1cc52a21d0f31bb47ca176b87c1a
\n\n
.venv\Lib\site-packages\overrides\__pycache__\typing_utils.cpython-313.pyc
typing_utils.cpython-313.pyc
Other
20,771
0.95
0.011765
0.012876
node-utils
752
2024-11-06T17:19:57.347213
Apache-2.0
false
0872e1e1c359c3de84bd0adcaf42ade5
\n\n
.venv\Lib\site-packages\overrides\__pycache__\__init__.cpython-313.pyc
__init__.cpython-313.pyc
Other
553
0.7
0
0
python-kit
3
2023-10-15T10:30:40.233755
MIT
false
f7582eb2651d37e7770d08412f0ab950
pip\n
.venv\Lib\site-packages\overrides-7.7.0.dist-info\INSTALLER
INSTALLER
Other
4
0.5
0
0
node-utils
956
2024-02-28T05:31:45.380361
BSD-3-Clause
false
365c9bfeb7d89244f2ce01c1de44cb85
Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. Definitions.\n\n "License" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n "Licensor" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n "Legal Entity" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n "control" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n "You" (or "Your") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n "Source" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n "Object" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n "Work" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n "Derivative Works" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n "Contribution" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, "submitted"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as "Not a Contribution."\n\n "Contributor" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a "NOTICE" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets "{}"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same "printed page" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright {yyyy} {name of copyright owner}\n\n Licensed under the Apache License, Version 2.0 (the "License");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an "AS IS" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n\n
.venv\Lib\site-packages\overrides-7.7.0.dist-info\LICENSE
LICENSE
Other
11,358
0.95
0.118812
0
vue-tools
85
2024-09-02T09:44:39.066622
Apache-2.0
false
fa818a259cbed7ce8bc2a22d35a464fc
Metadata-Version: 2.1\nName: overrides\nVersion: 7.7.0\nSummary: A decorator to automatically detect mismatch when overriding a method.\nHome-page: https://github.com/mkorpela/overrides\nAuthor: Mikko Korpela\nAuthor-email: mikko.korpela@gmail.com\nLicense: Apache License, Version 2.0\nKeywords: override,inheritence,OOP\nClassifier: Intended Audience :: Developers\nClassifier: Programming Language :: Python :: 3.6\nClassifier: Programming Language :: Python :: 3.7\nClassifier: Programming Language :: Python :: 3.8\nClassifier: Programming Language :: Python :: 3.9\nRequires-Python: >=3.6\nLicense-File: LICENSE\nRequires-Dist: typing ; python_version < "3.5"\n\noverrides\n=========\n\n.. image:: https://img.shields.io/pypi/v/overrides.svg\n :target: https://pypi.python.org/pypi/overrides\n\n.. image:: http://pepy.tech/badge/overrides\n :target: http://pepy.tech/project/overrides\n\nA decorator ``@override`` that verifies that a method that should override an inherited method actually does it.\n\nCopies the docstring of the inherited method to the overridden method.\n\nSince signature validation and docstring inheritance are performed on class creation and not on class instantiation,\nthis library significantly improves the safety and experience of creating class hierarchies in \nPython without significantly impacting performance. See https://stackoverflow.com/q/1167617 for the\ninitial inspiration for this library.\n\nMotivation\n----------\n\nPython has no standard mechanism by which to guarantee that (1) a method that previously overrode an inherited method\ncontinues to do so, and (2) a method that previously did not override an inherited will not override now.\nThis opens the door for subtle problems as class hierarchies evolve over time. For example,\n\n1. A method that is added to a superclass is shadowed by an existing method with the same name in a \n subclass.\n\n2. 
A method of a superclass that is overridden by a subclass is renamed in the superclass but not in \n the subclass.\n\n3. A method of a superclass that is overridden by a subclass is removed in the superclass but not in\n the subclass.\n\n4. A method of a superclass that is overridden by a subclass but the signature of the overridden\n method is incompatible with that of the inherited one.\n\nThese can be only checked by explicitly marking method override in the code.\n\nPython also has no standard mechanism by which to inherit docstrings in overridden methods. Because \nmost standard linters (e.g., flake8) have rules that require all public methods to have a docstring, \nthis inevitably leads to a proliferation of ``See parent class for usage`` docstrings on overridden\nmethods, or, worse, to a disabling of these rules altogether. In addition, mediocre or missing\ndocstrings degrade the quality of tooltips and completions that can be provided by an editor.\n\nInstallation\n------------\n\nCompatible with Python 3.6+.\n\n.. code-block:: bash\n\n $ pip install overrides\n\nUsage\n-----\n\nUse ``@override`` to indicate that a subclass method should override a superclass method.\n\n.. code-block:: python\n\n from overrides import override\n\n class SuperClass:\n\n def foo(self):\n """This docstring will be inherited by any method that overrides this!"""\n return 1\n\n def bar(self, x) -> str:\n return x\n\n class SubClass(SuperClass):\n\n @override\n def foo(self):\n return 2\n\n @override\n def bar(self, y) -> int: # Raises, because the signature is not compatible.\n return y\n \n @override\n def zoo(self): # Raises, because does not exist in the super class.\n return "foobarzoo"\n\nUse ``EnforceOverrides`` to require subclass methods that shadow superclass methods to be decorated \nwith ``@override``.\n\n.. 
code-block:: python\n \n from overrides import EnforceOverrides\n\n class SuperClass(EnforceOverrides):\n\n def foo(self):\n return 1\n\n class SubClass(SuperClass):\n\n def foo(self): # Raises, because @override is missing.\n return 2\n\nUse ``@final`` to indicate that a superclass method cannot be overriden.\nWith Python 3.11 and above ``@final`` is directly `typing.final <https://docs.python.org/3.11/library/typing.html#typing.final>`_.\n\n.. code-block:: python\n\n from overrides import EnforceOverrides, final, override\n\n class SuperClass(EnforceOverrides):\n\n @final\n def foo(self):\n return 1\n\n class SubClass(SuperClass):\n\n @override\n def foo(self): # Raises, because overriding a final method is forbidden.\n return 2\n\nNote that ``@classmethod`` and ``@staticmethod`` must be declared before ``@override``.\n\n.. code-block:: python\n\n from overrides import override\n\n class SuperClass:\n\n @staticmethod\n def foo(x):\n return 1\n\n class SubClass(SuperClass):\n\n @staticmethod\n @override\n def foo(x):\n return 2\n\n\nFlags of control\n----------------\n\n.. code-block:: python\n\n # To prevent all signature checks do:\n @override(check_signature=False)\n def some_method(self, now_this_can_be_funny_and_wrong: str, what_ever: int) -> "Dictirux":\n pass\n\n # To do the check only at runtime and solve some forward reference problems\n @override(check_at_runtime=True)\n def some_other_method(self, ..) -> "SomethingDefinedLater":\n pass\n\n a.some_other_method() # Kaboom if not SomethingDefinedLater\n\n\nContributors\n------------\n\nThis project exists only through the work of all the people who contribute.\n\nmkorpela, drorasaf, ngoodman90, TylerYep, leeopop, donpatrice, jayvdb, joelgrus, lisyarus, \nsoulmerge, rkr-at-dbx, ashwin153, brentyi, jobh, tjsmart, bersbersbers, LysanderGG, mgorny.\n
.venv\Lib\site-packages\overrides-7.7.0.dist-info\METADATA
METADATA
Other
5,756
0.95
0.172973
0.016
python-kit
488
2023-10-31T21:30:41.856364
GPL-3.0
false
a97bb57b6a096e827d5db0d21d9fb276
overrides-7.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4\noverrides-7.7.0.dist-info/LICENSE,sha256=xllut76FgcGL5zbIRvuRc7aezPbvlMUTWJPsVr2Sugg,11358\noverrides-7.7.0.dist-info/METADATA,sha256=YdaLvNew0ovZ-LgYzW8fzE5DfdbFSuYDrU2z-YbCgkU,5756\noverrides-7.7.0.dist-info/RECORD,,\noverrides-7.7.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92\noverrides-7.7.0.dist-info/top_level.txt,sha256=06v3aviU3stRPzoenrJ2K_Jz3Ma3R9FQYE-54eTOXYk,10\noverrides/__init__.py,sha256=hSeMqMXIkf6zYQE8P39rVmVbIq5P-GRvqyPWesO0C1A,333\noverrides/__pycache__/__init__.cpython-313.pyc,,\noverrides/__pycache__/enforce.cpython-313.pyc,,\noverrides/__pycache__/final.cpython-313.pyc,,\noverrides/__pycache__/overrides.cpython-313.pyc,,\noverrides/__pycache__/signature.cpython-313.pyc,,\noverrides/__pycache__/typing_utils.cpython-313.pyc,,\noverrides/enforce.py,sha256=hzTmo7VnKdFvQKZ0ULOL9rF-uPEGmYB0Gw9mrktnO0M,2349\noverrides/final.py,sha256=ykK1eXPKtR-SujZOVncSEOtL6eUaJ_nultMhZp3OsO0,1511\noverrides/overrides.py,sha256=Bmt7N8NGZqzNUPbZ1l81Mw75aMPV12O1zZ9JdIUuesc,7505\noverrides/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0\noverrides/signature.py,sha256=vfGiFm6ZkUxLg6QYLaMeHulNILc2O5xtzxzR1r424Ag,11785\noverrides/typing_utils.py,sha256=CPrnitNCiysLs484AYRGfM-q2Mfj6Uz32XsLAage2aI,14240\n
.venv\Lib\site-packages\overrides-7.7.0.dist-info\RECORD
RECORD
Other
1,344
0.7
0
0
react-lib
976
2024-07-29T07:37:14.030033
MIT
false
9c6694aa1d6b9fa7c1bf9b570245dabd
overrides\n
.venv\Lib\site-packages\overrides-7.7.0.dist-info\top_level.txt
top_level.txt
Other
10
0.5
0
0
python-kit
546
2024-01-22T14:21:24.217806
BSD-3-Clause
false
45da8d03b558d27851b80e9a5d20bee5
Wheel-Version: 1.0\nGenerator: bdist_wheel (0.42.0)\nRoot-Is-Purelib: true\nTag: py3-none-any\n\n
.venv\Lib\site-packages\overrides-7.7.0.dist-info\WHEEL
WHEEL
Other
92
0.5
0
0
python-kit
234
2023-12-08T22:29:59.545197
BSD-3-Clause
false
a227bf38fb17005b3bdb56ccc428b1bb
# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import annotations\n\nimport operator\nimport os\nimport platform\nimport sys\nfrom typing import AbstractSet, Any, Callable, Literal, TypedDict, Union, cast\n\nfrom ._parser import MarkerAtom, MarkerList, Op, Value, Variable\nfrom ._parser import parse_marker as _parse_marker\nfrom ._tokenizer import ParserSyntaxError\nfrom .specifiers import InvalidSpecifier, Specifier\nfrom .utils import canonicalize_name\n\n__all__ = [\n "EvaluateContext",\n "InvalidMarker",\n "Marker",\n "UndefinedComparison",\n "UndefinedEnvironmentName",\n "default_environment",\n]\n\nOperator = Callable[[str, Union[str, AbstractSet[str]]], bool]\nEvaluateContext = Literal["metadata", "lock_file", "requirement"]\nMARKERS_ALLOWING_SET = {"extras", "dependency_groups"}\n\n\nclass InvalidMarker(ValueError):\n """\n An invalid marker was found, users should refer to PEP 508.\n """\n\n\nclass UndefinedComparison(ValueError):\n """\n An invalid operation was attempted on a value that doesn't support it.\n """\n\n\nclass UndefinedEnvironmentName(ValueError):\n """\n A name was attempted to be used that does not exist inside of the\n environment.\n """\n\n\nclass Environment(TypedDict):\n implementation_name: str\n """The implementation's identifier, e.g. ``'cpython'``."""\n\n implementation_version: str\n """\n The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or\n ``'7.3.13'`` for PyPy3.10 v7.3.13.\n """\n\n os_name: str\n """\n The value of :py:data:`os.name`. The name of the operating system dependent module\n imported, e.g. ``'posix'``.\n """\n\n platform_machine: str\n """\n Returns the machine type, e.g. ``'i386'``.\n\n An empty string if the value cannot be determined.\n """\n\n platform_release: str\n """\n The system's release, e.g. 
``'2.2.0'`` or ``'NT'``.\n\n An empty string if the value cannot be determined.\n """\n\n platform_system: str\n """\n The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``.\n\n An empty string if the value cannot be determined.\n """\n\n platform_version: str\n """\n The system's release version, e.g. ``'#3 on degas'``.\n\n An empty string if the value cannot be determined.\n """\n\n python_full_version: str\n """\n The Python version as string ``'major.minor.patchlevel'``.\n\n Note that unlike the Python :py:data:`sys.version`, this value will always include\n the patchlevel (it defaults to 0).\n """\n\n platform_python_implementation: str\n """\n A string identifying the Python implementation, e.g. ``'CPython'``.\n """\n\n python_version: str\n """The Python version as string ``'major.minor'``."""\n\n sys_platform: str\n """\n This string contains a platform identifier that can be used to append\n platform-specific components to :py:data:`sys.path`, for instance.\n\n For Unix systems, except on Linux and AIX, this is the lowercased OS name as\n returned by ``uname -s`` with the first part of the version as returned by\n ``uname -r`` appended, e.g. 
``'sunos5'`` or ``'freebsd8'``, at the time when Python\n was built.\n """\n\n\ndef _normalize_extra_values(results: Any) -> Any:\n """\n Normalize extra values.\n """\n if isinstance(results[0], tuple):\n lhs, op, rhs = results[0]\n if isinstance(lhs, Variable) and lhs.value == "extra":\n normalized_extra = canonicalize_name(rhs.value)\n rhs = Value(normalized_extra)\n elif isinstance(rhs, Variable) and rhs.value == "extra":\n normalized_extra = canonicalize_name(lhs.value)\n lhs = Value(normalized_extra)\n results[0] = lhs, op, rhs\n return results\n\n\ndef _format_marker(\n marker: list[str] | MarkerAtom | str, first: bool | None = True\n) -> str:\n assert isinstance(marker, (list, tuple, str))\n\n # Sometimes we have a structure like [[...]] which is a single item list\n # where the single item is itself it's own list. In that case we want skip\n # the rest of this function so that we don't get extraneous () on the\n # outside.\n if (\n isinstance(marker, list)\n and len(marker) == 1\n and isinstance(marker[0], (list, tuple))\n ):\n return _format_marker(marker[0])\n\n if isinstance(marker, list):\n inner = (_format_marker(m, first=False) for m in marker)\n if first:\n return " ".join(inner)\n else:\n return "(" + " ".join(inner) + ")"\n elif isinstance(marker, tuple):\n return " ".join([m.serialize() for m in marker])\n else:\n return marker\n\n\n_operators: dict[str, Operator] = {\n "in": lambda lhs, rhs: lhs in rhs,\n "not in": lambda lhs, rhs: lhs not in rhs,\n "<": operator.lt,\n "<=": operator.le,\n "==": operator.eq,\n "!=": operator.ne,\n ">=": operator.ge,\n ">": operator.gt,\n}\n\n\ndef _eval_op(lhs: str, op: Op, rhs: str | AbstractSet[str]) -> bool:\n if isinstance(rhs, str):\n try:\n spec = Specifier("".join([op.serialize(), rhs]))\n except InvalidSpecifier:\n pass\n else:\n return spec.contains(lhs, prereleases=True)\n\n oper: Operator | None = _operators.get(op.serialize())\n if oper is None:\n raise UndefinedComparison(f"Undefined {op!r} on 
{lhs!r} and {rhs!r}.")\n\n return oper(lhs, rhs)\n\n\ndef _normalize(\n lhs: str, rhs: str | AbstractSet[str], key: str\n) -> tuple[str, str | AbstractSet[str]]:\n # PEP 685 – Comparison of extra names for optional distribution dependencies\n # https://peps.python.org/pep-0685/\n # > When comparing extra names, tools MUST normalize the names being\n # > compared using the semantics outlined in PEP 503 for names\n if key == "extra":\n assert isinstance(rhs, str), "extra value must be a string"\n return (canonicalize_name(lhs), canonicalize_name(rhs))\n if key in MARKERS_ALLOWING_SET:\n if isinstance(rhs, str): # pragma: no cover\n return (canonicalize_name(lhs), canonicalize_name(rhs))\n else:\n return (canonicalize_name(lhs), {canonicalize_name(v) for v in rhs})\n\n # other environment markers don't have such standards\n return lhs, rhs\n\n\ndef _evaluate_markers(\n markers: MarkerList, environment: dict[str, str | AbstractSet[str]]\n) -> bool:\n groups: list[list[bool]] = [[]]\n\n for marker in markers:\n assert isinstance(marker, (list, tuple, str))\n\n if isinstance(marker, list):\n groups[-1].append(_evaluate_markers(marker, environment))\n elif isinstance(marker, tuple):\n lhs, op, rhs = marker\n\n if isinstance(lhs, Variable):\n environment_key = lhs.value\n lhs_value = environment[environment_key]\n rhs_value = rhs.value\n else:\n lhs_value = lhs.value\n environment_key = rhs.value\n rhs_value = environment[environment_key]\n assert isinstance(lhs_value, str), "lhs must be a string"\n lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)\n groups[-1].append(_eval_op(lhs_value, op, rhs_value))\n else:\n assert marker in ["and", "or"]\n if marker == "or":\n groups.append([])\n\n return any(all(item) for item in groups)\n\n\ndef format_full_version(info: sys._version_info) -> str:\n version = f"{info.major}.{info.minor}.{info.micro}"\n kind = info.releaselevel\n if kind != "final":\n version += kind[0] + str(info.serial)\n return 
version\n\n\ndef default_environment() -> Environment:\n iver = format_full_version(sys.implementation.version)\n implementation_name = sys.implementation.name\n return {\n "implementation_name": implementation_name,\n "implementation_version": iver,\n "os_name": os.name,\n "platform_machine": platform.machine(),\n "platform_release": platform.release(),\n "platform_system": platform.system(),\n "platform_version": platform.version(),\n "python_full_version": platform.python_version(),\n "platform_python_implementation": platform.python_implementation(),\n "python_version": ".".join(platform.python_version_tuple()[:2]),\n "sys_platform": sys.platform,\n }\n\n\nclass Marker:\n def __init__(self, marker: str) -> None:\n # Note: We create a Marker object without calling this constructor in\n # packaging.requirements.Requirement. If any additional logic is\n # added here, make sure to mirror/adapt Requirement.\n try:\n self._markers = _normalize_extra_values(_parse_marker(marker))\n # The attribute `_markers` can be described in terms of a recursive type:\n # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]\n #\n # For example, the following expression:\n # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")\n #\n # is parsed into:\n # [\n # (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),\n # 'and',\n # [\n # (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),\n # 'or',\n # (<Variable('os_name')>, <Op('==')>, <Value('unix')>)\n # ]\n # ]\n except ParserSyntaxError as e:\n raise InvalidMarker(str(e)) from e\n\n def __str__(self) -> str:\n return _format_marker(self._markers)\n\n def __repr__(self) -> str:\n return f"<Marker('{self}')>"\n\n def __hash__(self) -> int:\n return hash((self.__class__.__name__, str(self)))\n\n def __eq__(self, other: Any) -> bool:\n if not isinstance(other, Marker):\n return NotImplemented\n\n return str(self) == str(other)\n\n def evaluate(\n self,\n environment: dict[str, str] | None = 
None,\n context: EvaluateContext = "metadata",\n ) -> bool:\n """Evaluate a marker.\n\n Return the boolean from evaluating the given marker against the\n environment. environment is an optional argument to override all or\n part of the determined environment. The *context* parameter specifies what\n context the markers are being evaluated for, which influences what markers\n are considered valid. Acceptable values are "metadata" (for core metadata;\n default), "lock_file", and "requirement" (i.e. all other situations).\n\n The environment is determined from the current Python process.\n """\n current_environment = cast(\n "dict[str, str | AbstractSet[str]]", default_environment()\n )\n if context == "lock_file":\n current_environment.update(\n extras=frozenset(), dependency_groups=frozenset()\n )\n elif context == "metadata":\n current_environment["extra"] = ""\n if environment is not None:\n current_environment.update(environment)\n # The API used to allow setting extra to None. We need to handle this\n # case for backwards compatibility.\n if "extra" in current_environment and current_environment["extra"] is None:\n current_environment["extra"] = ""\n\n return _evaluate_markers(\n self._markers, _repair_python_full_version(current_environment)\n )\n\n\ndef _repair_python_full_version(\n env: dict[str, str | AbstractSet[str]],\n) -> dict[str, str | AbstractSet[str]]:\n """\n Work around platform.python_version() returning something that is not PEP 440\n compliant for non-tagged Python builds.\n """\n python_full_version = cast(str, env["python_full_version"])\n if python_full_version.endswith("+"):\n env["python_full_version"] = f"{python_full_version}local"\n return env\n
.venv\Lib\site-packages\packaging\markers.py
markers.py
Python
12,049
0.95
0.165746
0.111864
awesome-app
303
2023-09-16T20:38:29.576269
BSD-3-Clause
false
1be7f129d91388653f23a0fc7414bbfe
from __future__ import annotations\n\nimport email.feedparser\nimport email.header\nimport email.message\nimport email.parser\nimport email.policy\nimport pathlib\nimport sys\nimport typing\nfrom typing import (\n Any,\n Callable,\n Generic,\n Literal,\n TypedDict,\n cast,\n)\n\nfrom . import licenses, requirements, specifiers, utils\nfrom . import version as version_module\nfrom .licenses import NormalizedLicenseExpression\n\nT = typing.TypeVar("T")\n\n\nif sys.version_info >= (3, 11): # pragma: no cover\n ExceptionGroup = ExceptionGroup\nelse: # pragma: no cover\n\n class ExceptionGroup(Exception):\n """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.\n\n If :external:exc:`ExceptionGroup` is already defined by Python itself,\n that version is used instead.\n """\n\n message: str\n exceptions: list[Exception]\n\n def __init__(self, message: str, exceptions: list[Exception]) -> None:\n self.message = message\n self.exceptions = exceptions\n\n def __repr__(self) -> str:\n return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"\n\n\nclass InvalidMetadata(ValueError):\n """A metadata field contains invalid data."""\n\n field: str\n """The name of the field that contains invalid data."""\n\n def __init__(self, field: str, message: str) -> None:\n self.field = field\n super().__init__(message)\n\n\n# The RawMetadata class attempts to make as few assumptions about the underlying\n# serialization formats as possible. The idea is that as long as a serialization\n# formats offer some very basic primitives in *some* way then we can support\n# serializing to and from that format.\nclass RawMetadata(TypedDict, total=False):\n """A dictionary of raw core metadata.\n\n Each field in core metadata maps to a key of this dictionary (when data is\n provided). The key is lower-case and underscores are used instead of dashes\n compared to the equivalent core metadata field. 
Any core metadata field that\n can be specified multiple times or can hold multiple values in a single\n field have a key with a plural name. See :class:`Metadata` whose attributes\n match the keys of this dictionary.\n\n Core metadata fields that can be specified multiple times are stored as a\n list or dict depending on which is appropriate for the field. Any fields\n which hold multiple values in a single field are stored as a list.\n\n """\n\n # Metadata 1.0 - PEP 241\n metadata_version: str\n name: str\n version: str\n platforms: list[str]\n summary: str\n description: str\n keywords: list[str]\n home_page: str\n author: str\n author_email: str\n license: str\n\n # Metadata 1.1 - PEP 314\n supported_platforms: list[str]\n download_url: str\n classifiers: list[str]\n requires: list[str]\n provides: list[str]\n obsoletes: list[str]\n\n # Metadata 1.2 - PEP 345\n maintainer: str\n maintainer_email: str\n requires_dist: list[str]\n provides_dist: list[str]\n obsoletes_dist: list[str]\n requires_python: str\n requires_external: list[str]\n project_urls: dict[str, str]\n\n # Metadata 2.0\n # PEP 426 attempted to completely revamp the metadata format\n # but got stuck without ever being able to build consensus on\n # it and ultimately ended up withdrawn.\n #\n # However, a number of tools had started emitting METADATA with\n # `2.0` Metadata-Version, so for historical reasons, this version\n # was skipped.\n\n # Metadata 2.1 - PEP 566\n description_content_type: str\n provides_extra: list[str]\n\n # Metadata 2.2 - PEP 643\n dynamic: list[str]\n\n # Metadata 2.3 - PEP 685\n # No new fields were added in PEP 685, just some edge case were\n # tightened up to provide better interoptability.\n\n # Metadata 2.4 - PEP 639\n license_expression: str\n license_files: list[str]\n\n\n_STRING_FIELDS = {\n "author",\n "author_email",\n "description",\n "description_content_type",\n "download_url",\n "home_page",\n "license",\n "license_expression",\n "maintainer",\n 
"maintainer_email",\n "metadata_version",\n "name",\n "requires_python",\n "summary",\n "version",\n}\n\n_LIST_FIELDS = {\n "classifiers",\n "dynamic",\n "license_files",\n "obsoletes",\n "obsoletes_dist",\n "platforms",\n "provides",\n "provides_dist",\n "provides_extra",\n "requires",\n "requires_dist",\n "requires_external",\n "supported_platforms",\n}\n\n_DICT_FIELDS = {\n "project_urls",\n}\n\n\ndef _parse_keywords(data: str) -> list[str]:\n """Split a string of comma-separated keywords into a list of keywords."""\n return [k.strip() for k in data.split(",")]\n\n\ndef _parse_project_urls(data: list[str]) -> dict[str, str]:\n """Parse a list of label/URL string pairings separated by a comma."""\n urls = {}\n for pair in data:\n # Our logic is slightly tricky here as we want to try and do\n # *something* reasonable with malformed data.\n #\n # The main thing that we have to worry about, is data that does\n # not have a ',' at all to split the label from the Value. There\n # isn't a singular right answer here, and we will fail validation\n # later on (if the caller is validating) so it doesn't *really*\n # matter, but since the missing value has to be an empty str\n # and our return value is dict[str, str], if we let the key\n # be the missing value, then they'd have multiple '' values that\n # overwrite each other in a accumulating dict.\n #\n # The other potentional issue is that it's possible to have the\n # same label multiple times in the metadata, with no solid "right"\n # answer with what to do in that case. As such, we'll do the only\n # thing we can, which is treat the field as unparseable and add it\n # to our list of unparsed fields.\n parts = [p.strip() for p in pair.split(",", 1)]\n parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items\n\n # TODO: The spec doesn't say anything about if the keys should be\n # considered case sensitive or not... 
logically they should\n # be case-preserving and case-insensitive, but doing that\n # would open up more cases where we might have duplicate\n # entries.\n label, url = parts\n if label in urls:\n # The label already exists in our set of urls, so this field\n # is unparseable, and we can just add the whole thing to our\n # unparseable data and stop processing it.\n raise KeyError("duplicate labels in project urls")\n urls[label] = url\n\n return urls\n\n\ndef _get_payload(msg: email.message.Message, source: bytes | str) -> str:\n """Get the body of the message."""\n # If our source is a str, then our caller has managed encodings for us,\n # and we don't need to deal with it.\n if isinstance(source, str):\n payload = msg.get_payload()\n assert isinstance(payload, str)\n return payload\n # If our source is a bytes, then we're managing the encoding and we need\n # to deal with it.\n else:\n bpayload = msg.get_payload(decode=True)\n assert isinstance(bpayload, bytes)\n try:\n return bpayload.decode("utf8", "strict")\n except UnicodeDecodeError as exc:\n raise ValueError("payload in an invalid encoding") from exc\n\n\n# The various parse_FORMAT functions here are intended to be as lenient as\n# possible in their parsing, while still returning a correctly typed\n# RawMetadata.\n#\n# To aid in this, we also generally want to do as little touching of the\n# data as possible, except where there are possibly some historic holdovers\n# that make valid data awkward to work with.\n#\n# While this is a lower level, intermediate format than our ``Metadata``\n# class, some light touch ups can make a massive difference in usability.\n\n# Map METADATA fields to RawMetadata.\n_EMAIL_TO_RAW_MAPPING = {\n "author": "author",\n "author-email": "author_email",\n "classifier": "classifiers",\n "description": "description",\n "description-content-type": "description_content_type",\n "download-url": "download_url",\n "dynamic": "dynamic",\n "home-page": "home_page",\n "keywords": 
"keywords",\n "license": "license",\n "license-expression": "license_expression",\n "license-file": "license_files",\n "maintainer": "maintainer",\n "maintainer-email": "maintainer_email",\n "metadata-version": "metadata_version",\n "name": "name",\n "obsoletes": "obsoletes",\n "obsoletes-dist": "obsoletes_dist",\n "platform": "platforms",\n "project-url": "project_urls",\n "provides": "provides",\n "provides-dist": "provides_dist",\n "provides-extra": "provides_extra",\n "requires": "requires",\n "requires-dist": "requires_dist",\n "requires-external": "requires_external",\n "requires-python": "requires_python",\n "summary": "summary",\n "supported-platform": "supported_platforms",\n "version": "version",\n}\n_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}\n\n\ndef parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:\n """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).\n\n This function returns a two-item tuple of dicts. The first dict is of\n recognized fields from the core metadata specification. Fields that can be\n parsed and translated into Python's built-in types are converted\n appropriately. All other fields are left as-is. Fields that are allowed to\n appear multiple times are stored as lists.\n\n The second dict contains all other fields from the metadata. This includes\n any unrecognized fields. It also includes any fields which are expected to\n be parsed into a built-in type but were not formatted appropriately. 
Finally,\n any fields that are expected to appear only once but are repeated are\n included in this dict.\n\n """\n raw: dict[str, str | list[str] | dict[str, str]] = {}\n unparsed: dict[str, list[str]] = {}\n\n if isinstance(data, str):\n parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)\n else:\n parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data)\n\n # We have to wrap parsed.keys() in a set, because in the case of multiple\n # values for a key (a list), the key will appear multiple times in the\n # list of keys, but we're avoiding that by using get_all().\n for name in frozenset(parsed.keys()):\n # Header names in RFC are case insensitive, so we'll normalize to all\n # lower case to make comparisons easier.\n name = name.lower()\n\n # We use get_all() here, even for fields that aren't multiple use,\n # because otherwise someone could have e.g. two Name fields, and we\n # would just silently ignore it rather than doing something about it.\n headers = parsed.get_all(name) or []\n\n # The way the email module works when parsing bytes is that it\n # unconditionally decodes the bytes as ascii using the surrogateescape\n # handler. When you pull that data back out (such as with get_all() ),\n # it looks to see if the str has any surrogate escapes, and if it does\n # it wraps it in a Header object instead of returning the string.\n #\n # As such, we'll look for those Header objects, and fix up the encoding.\n value = []\n # Flag if we have run into any issues processing the headers, thus\n # signalling that the data belongs in 'unparsed'.\n valid_encoding = True\n for h in headers:\n # It's unclear if this can return more types than just a Header or\n # a str, so we'll just assert here to make sure.\n assert isinstance(h, (email.header.Header, str))\n\n # If it's a header object, we need to do our little dance to get\n # the real data out of it. 
In cases where there is invalid data\n # we're going to end up with mojibake, but there's no obvious, good\n # way around that without reimplementing parts of the Header object\n # ourselves.\n #\n # That should be fine since, if mojibacked happens, this key is\n # going into the unparsed dict anyways.\n if isinstance(h, email.header.Header):\n # The Header object stores it's data as chunks, and each chunk\n # can be independently encoded, so we'll need to check each\n # of them.\n chunks: list[tuple[bytes, str | None]] = []\n for bin, encoding in email.header.decode_header(h):\n try:\n bin.decode("utf8", "strict")\n except UnicodeDecodeError:\n # Enable mojibake.\n encoding = "latin1"\n valid_encoding = False\n else:\n encoding = "utf8"\n chunks.append((bin, encoding))\n\n # Turn our chunks back into a Header object, then let that\n # Header object do the right thing to turn them into a\n # string for us.\n value.append(str(email.header.make_header(chunks)))\n # This is already a string, so just add it.\n else:\n value.append(h)\n\n # We've processed all of our values to get them into a list of str,\n # but we may have mojibake data, in which case this is an unparsed\n # field.\n if not valid_encoding:\n unparsed[name] = value\n continue\n\n raw_name = _EMAIL_TO_RAW_MAPPING.get(name)\n if raw_name is None:\n # This is a bit of a weird situation, we've encountered a key that\n # we don't know what it means, so we don't know whether it's meant\n # to be a list or not.\n #\n # Since we can't really tell one way or another, we'll just leave it\n # as a list, even though it may be a single item list, because that's\n # what makes the most sense for email headers.\n unparsed[name] = value\n continue\n\n # If this is one of our string fields, then we'll check to see if our\n # value is a list of a single item. 
If it is then we'll assume that\n # it was emitted as a single string, and unwrap the str from inside\n # the list.\n #\n # If it's any other kind of data, then we haven't the faintest clue\n # what we should parse it as, and we have to just add it to our list\n # of unparsed stuff.\n if raw_name in _STRING_FIELDS and len(value) == 1:\n raw[raw_name] = value[0]\n # If this is one of our list of string fields, then we can just assign\n # the value, since email *only* has strings, and our get_all() call\n # above ensures that this is a list.\n elif raw_name in _LIST_FIELDS:\n raw[raw_name] = value\n # Special Case: Keywords\n # The keywords field is implemented in the metadata spec as a str,\n # but it conceptually is a list of strings, and is serialized using\n # ", ".join(keywords), so we'll do some light data massaging to turn\n # this into what it logically is.\n elif raw_name == "keywords" and len(value) == 1:\n raw[raw_name] = _parse_keywords(value[0])\n # Special Case: Project-URL\n # The project urls is implemented in the metadata spec as a list of\n # specially-formatted strings that represent a key and a value, which\n # is fundamentally a mapping, however the email format doesn't support\n # mappings in a sane way, so it was crammed into a list of strings\n # instead.\n #\n # We will do a little light data massaging to turn this into a map as\n # it logically should be.\n elif raw_name == "project_urls":\n try:\n raw[raw_name] = _parse_project_urls(value)\n except KeyError:\n unparsed[name] = value\n # Nothing that we've done has managed to parse this, so it'll just\n # throw it in our unparseable data and move on.\n else:\n unparsed[name] = value\n\n # We need to support getting the Description from the message payload in\n # addition to getting it from the the headers. 
This does mean, though, there\n # is the possibility of it being set both ways, in which case we put both\n # in 'unparsed' since we don't know which is right.\n try:\n payload = _get_payload(parsed, data)\n except ValueError:\n unparsed.setdefault("description", []).append(\n parsed.get_payload(decode=isinstance(data, bytes)) # type: ignore[call-overload]\n )\n else:\n if payload:\n # Check to see if we've already got a description, if so then both\n # it, and this body move to unparseable.\n if "description" in raw:\n description_header = cast(str, raw.pop("description"))\n unparsed.setdefault("description", []).extend(\n [description_header, payload]\n )\n elif "description" in unparsed:\n unparsed["description"].append(payload)\n else:\n raw["description"] = payload\n\n # We need to cast our `raw` to a metadata, because a TypedDict only support\n # literal key names, but we're computing our key names on purpose, but the\n # way this function is implemented, our `TypedDict` can only have valid key\n # names.\n return cast(RawMetadata, raw), unparsed\n\n\n_NOT_FOUND = object()\n\n\n# Keep the two values in sync.\n_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]\n_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]\n\n_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])\n\n\nclass _Validator(Generic[T]):\n """Validate a metadata field.\n\n All _process_*() methods correspond to a core metadata field. The method is\n called with the field's raw value. If the raw value is valid it is returned\n in its "enriched" form (e.g. 
``version.Version`` for the ``Version`` field).\n If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause\n as appropriate).\n """\n\n name: str\n raw_name: str\n added: _MetadataVersion\n\n def __init__(\n self,\n *,\n added: _MetadataVersion = "1.0",\n ) -> None:\n self.added = added\n\n def __set_name__(self, _owner: Metadata, name: str) -> None:\n self.name = name\n self.raw_name = _RAW_TO_EMAIL_MAPPING[name]\n\n def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T:\n # With Python 3.8, the caching can be replaced with functools.cached_property().\n # No need to check the cache as attribute lookup will resolve into the\n # instance's __dict__ before __get__ is called.\n cache = instance.__dict__\n value = instance._raw.get(self.name)\n\n # To make the _process_* methods easier, we'll check if the value is None\n # and if this field is NOT a required attribute, and if both of those\n # things are true, we'll skip the the converter. This will mean that the\n # converters never have to deal with the None union.\n if self.name in _REQUIRED_ATTRS or value is not None:\n try:\n converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")\n except AttributeError:\n pass\n else:\n value = converter(value)\n\n cache[self.name] = value\n try:\n del instance._raw[self.name] # type: ignore[misc]\n except KeyError:\n pass\n\n return cast(T, value)\n\n def _invalid_metadata(\n self, msg: str, cause: Exception | None = None\n ) -> InvalidMetadata:\n exc = InvalidMetadata(\n self.raw_name, msg.format_map({"field": repr(self.raw_name)})\n )\n exc.__cause__ = cause\n return exc\n\n def _process_metadata_version(self, value: str) -> _MetadataVersion:\n # Implicitly makes Metadata-Version required.\n if value not in _VALID_METADATA_VERSIONS:\n raise self._invalid_metadata(f"{value!r} is not a valid metadata version")\n return cast(_MetadataVersion, value)\n\n def _process_name(self, value: str) -> str:\n if not value:\n raise 
self._invalid_metadata("{field} is a required field")\n # Validate the name as a side-effect.\n try:\n utils.canonicalize_name(value, validate=True)\n except utils.InvalidName as exc:\n raise self._invalid_metadata(\n f"{value!r} is invalid for {{field}}", cause=exc\n ) from exc\n else:\n return value\n\n def _process_version(self, value: str) -> version_module.Version:\n if not value:\n raise self._invalid_metadata("{field} is a required field")\n try:\n return version_module.parse(value)\n except version_module.InvalidVersion as exc:\n raise self._invalid_metadata(\n f"{value!r} is invalid for {{field}}", cause=exc\n ) from exc\n\n def _process_summary(self, value: str) -> str:\n """Check the field contains no newlines."""\n if "\n" in value:\n raise self._invalid_metadata("{field} must be a single line")\n return value\n\n def _process_description_content_type(self, value: str) -> str:\n content_types = {"text/plain", "text/x-rst", "text/markdown"}\n message = email.message.EmailMessage()\n message["content-type"] = value\n\n content_type, parameters = (\n # Defaults to `text/plain` if parsing failed.\n message.get_content_type().lower(),\n message["content-type"].params,\n )\n # Check if content-type is valid or defaulted to `text/plain` and thus was\n # not parseable.\n if content_type not in content_types or content_type not in value.lower():\n raise self._invalid_metadata(\n f"{{field}} must be one of {list(content_types)}, not {value!r}"\n )\n\n charset = parameters.get("charset", "UTF-8")\n if charset != "UTF-8":\n raise self._invalid_metadata(\n f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"\n )\n\n markdown_variants = {"GFM", "CommonMark"}\n variant = parameters.get("variant", "GFM") # Use an acceptable default.\n if content_type == "text/markdown" and variant not in markdown_variants:\n raise self._invalid_metadata(\n f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "\n f"not {variant!r}",\n )\n return 
value\n\n def _process_dynamic(self, value: list[str]) -> list[str]:\n for dynamic_field in map(str.lower, value):\n if dynamic_field in {"name", "version", "metadata-version"}:\n raise self._invalid_metadata(\n f"{dynamic_field!r} is not allowed as a dynamic field"\n )\n elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:\n raise self._invalid_metadata(\n f"{dynamic_field!r} is not a valid dynamic field"\n )\n return list(map(str.lower, value))\n\n def _process_provides_extra(\n self,\n value: list[str],\n ) -> list[utils.NormalizedName]:\n normalized_names = []\n try:\n for name in value:\n normalized_names.append(utils.canonicalize_name(name, validate=True))\n except utils.InvalidName as exc:\n raise self._invalid_metadata(\n f"{name!r} is invalid for {{field}}", cause=exc\n ) from exc\n else:\n return normalized_names\n\n def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:\n try:\n return specifiers.SpecifierSet(value)\n except specifiers.InvalidSpecifier as exc:\n raise self._invalid_metadata(\n f"{value!r} is invalid for {{field}}", cause=exc\n ) from exc\n\n def _process_requires_dist(\n self,\n value: list[str],\n ) -> list[requirements.Requirement]:\n reqs = []\n try:\n for req in value:\n reqs.append(requirements.Requirement(req))\n except requirements.InvalidRequirement as exc:\n raise self._invalid_metadata(\n f"{req!r} is invalid for {{field}}", cause=exc\n ) from exc\n else:\n return reqs\n\n def _process_license_expression(\n self, value: str\n ) -> NormalizedLicenseExpression | None:\n try:\n return licenses.canonicalize_license_expression(value)\n except ValueError as exc:\n raise self._invalid_metadata(\n f"{value!r} is invalid for {{field}}", cause=exc\n ) from exc\n\n def _process_license_files(self, value: list[str]) -> list[str]:\n paths = []\n for path in value:\n if ".." 
in path:\n raise self._invalid_metadata(\n f"{path!r} is invalid for {{field}}, "\n "parent directory indicators are not allowed"\n )\n if "*" in path:\n raise self._invalid_metadata(\n f"{path!r} is invalid for {{field}}, paths must be resolved"\n )\n if (\n pathlib.PurePosixPath(path).is_absolute()\n or pathlib.PureWindowsPath(path).is_absolute()\n ):\n raise self._invalid_metadata(\n f"{path!r} is invalid for {{field}}, paths must be relative"\n )\n if pathlib.PureWindowsPath(path).as_posix() != path:\n raise self._invalid_metadata(\n f"{path!r} is invalid for {{field}}, paths must use '/' delimiter"\n )\n paths.append(path)\n return paths\n\n\nclass Metadata:\n """Representation of distribution metadata.\n\n Compared to :class:`RawMetadata`, this class provides objects representing\n metadata fields instead of only using built-in types. Any invalid metadata\n will cause :exc:`InvalidMetadata` to be raised (with a\n :py:attr:`~BaseException.__cause__` attribute as appropriate).\n """\n\n _raw: RawMetadata\n\n @classmethod\n def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata:\n """Create an instance from :class:`RawMetadata`.\n\n If *validate* is true, all metadata will be validated. 
All exceptions\n related to validation will be gathered and raised as an :class:`ExceptionGroup`.\n """\n ins = cls()\n ins._raw = data.copy() # Mutations occur due to caching enriched values.\n\n if validate:\n exceptions: list[Exception] = []\n try:\n metadata_version = ins.metadata_version\n metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)\n except InvalidMetadata as metadata_version_exc:\n exceptions.append(metadata_version_exc)\n metadata_version = None\n\n # Make sure to check for the fields that are present, the required\n # fields (so their absence can be reported).\n fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS\n # Remove fields that have already been checked.\n fields_to_check -= {"metadata_version"}\n\n for key in fields_to_check:\n try:\n if metadata_version:\n # Can't use getattr() as that triggers descriptor protocol which\n # will fail due to no value for the instance argument.\n try:\n field_metadata_version = cls.__dict__[key].added\n except KeyError:\n exc = InvalidMetadata(key, f"unrecognized field: {key!r}")\n exceptions.append(exc)\n continue\n field_age = _VALID_METADATA_VERSIONS.index(\n field_metadata_version\n )\n if field_age > metadata_age:\n field = _RAW_TO_EMAIL_MAPPING[key]\n exc = InvalidMetadata(\n field,\n f"{field} introduced in metadata version "\n f"{field_metadata_version}, not {metadata_version}",\n )\n exceptions.append(exc)\n continue\n getattr(ins, key)\n except InvalidMetadata as exc:\n exceptions.append(exc)\n\n if exceptions:\n raise ExceptionGroup("invalid metadata", exceptions)\n\n return ins\n\n @classmethod\n def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata:\n """Parse metadata from email headers.\n\n If *validate* is true, the metadata will be validated. 
All exceptions\n related to validation will be gathered and raised as an :class:`ExceptionGroup`.\n """\n raw, unparsed = parse_email(data)\n\n if validate:\n exceptions: list[Exception] = []\n for unparsed_key in unparsed:\n if unparsed_key in _EMAIL_TO_RAW_MAPPING:\n message = f"{unparsed_key!r} has invalid data"\n else:\n message = f"unrecognized field: {unparsed_key!r}"\n exceptions.append(InvalidMetadata(unparsed_key, message))\n\n if exceptions:\n raise ExceptionGroup("unparsed", exceptions)\n\n try:\n return cls.from_raw(raw, validate=validate)\n except ExceptionGroup as exc_group:\n raise ExceptionGroup(\n "invalid or unparsed metadata", exc_group.exceptions\n ) from None\n\n metadata_version: _Validator[_MetadataVersion] = _Validator()\n """:external:ref:`core-metadata-metadata-version`\n (required; validated to be a valid metadata version)"""\n # `name` is not normalized/typed to NormalizedName so as to provide access to\n # the original/raw name.\n name: _Validator[str] = _Validator()\n """:external:ref:`core-metadata-name`\n (required; validated using :func:`~packaging.utils.canonicalize_name` and its\n *validate* parameter)"""\n version: _Validator[version_module.Version] = _Validator()\n """:external:ref:`core-metadata-version` (required)"""\n dynamic: _Validator[list[str] | None] = _Validator(\n added="2.2",\n )\n """:external:ref:`core-metadata-dynamic`\n (validated against core metadata field names and lowercased)"""\n platforms: _Validator[list[str] | None] = _Validator()\n """:external:ref:`core-metadata-platform`"""\n supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1")\n """:external:ref:`core-metadata-supported-platform`"""\n summary: _Validator[str | None] = _Validator()\n """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""\n description: _Validator[str | None] = _Validator() # TODO 2.1: can be in body\n """:external:ref:`core-metadata-description`"""\n description_content_type: _Validator[str 
| None] = _Validator(added="2.1")\n """:external:ref:`core-metadata-description-content-type` (validated)"""\n keywords: _Validator[list[str] | None] = _Validator()\n """:external:ref:`core-metadata-keywords`"""\n home_page: _Validator[str | None] = _Validator()\n """:external:ref:`core-metadata-home-page`"""\n download_url: _Validator[str | None] = _Validator(added="1.1")\n """:external:ref:`core-metadata-download-url`"""\n author: _Validator[str | None] = _Validator()\n """:external:ref:`core-metadata-author`"""\n author_email: _Validator[str | None] = _Validator()\n """:external:ref:`core-metadata-author-email`"""\n maintainer: _Validator[str | None] = _Validator(added="1.2")\n """:external:ref:`core-metadata-maintainer`"""\n maintainer_email: _Validator[str | None] = _Validator(added="1.2")\n """:external:ref:`core-metadata-maintainer-email`"""\n license: _Validator[str | None] = _Validator()\n """:external:ref:`core-metadata-license`"""\n license_expression: _Validator[NormalizedLicenseExpression | None] = _Validator(\n added="2.4"\n )\n """:external:ref:`core-metadata-license-expression`"""\n license_files: _Validator[list[str] | None] = _Validator(added="2.4")\n """:external:ref:`core-metadata-license-file`"""\n classifiers: _Validator[list[str] | None] = _Validator(added="1.1")\n """:external:ref:`core-metadata-classifier`"""\n requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator(\n added="1.2"\n )\n """:external:ref:`core-metadata-requires-dist`"""\n requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator(\n added="1.2"\n )\n """:external:ref:`core-metadata-requires-python`"""\n # Because `Requires-External` allows for non-PEP 440 version specifiers, we\n # don't do any processing on the values.\n requires_external: _Validator[list[str] | None] = _Validator(added="1.2")\n """:external:ref:`core-metadata-requires-external`"""\n project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2")\n 
""":external:ref:`core-metadata-project-url`"""\n # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation\n # regardless of metadata version.\n provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator(\n added="2.1",\n )\n """:external:ref:`core-metadata-provides-extra`"""\n provides_dist: _Validator[list[str] | None] = _Validator(added="1.2")\n """:external:ref:`core-metadata-provides-dist`"""\n obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2")\n """:external:ref:`core-metadata-obsoletes-dist`"""\n requires: _Validator[list[str] | None] = _Validator(added="1.1")\n """``Requires`` (deprecated)"""\n provides: _Validator[list[str] | None] = _Validator(added="1.1")\n """``Provides`` (deprecated)"""\n obsoletes: _Validator[list[str] | None] = _Validator(added="1.1")\n """``Obsoletes`` (deprecated)"""\n
.venv\Lib\site-packages\packaging\metadata.py
metadata.py
Python
34,739
0.95
0.162413
0.22325
awesome-app
897
2024-05-08T12:07:36.702137
GPL-3.0
false
7cfeeeeb4a2be7848c92ae82b5c9f6a1
# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\nfrom __future__ import annotations\n\nfrom typing import Any, Iterator\n\nfrom ._parser import parse_requirement as _parse_requirement\nfrom ._tokenizer import ParserSyntaxError\nfrom .markers import Marker, _normalize_extra_values\nfrom .specifiers import SpecifierSet\nfrom .utils import canonicalize_name\n\n\nclass InvalidRequirement(ValueError):\n """\n An invalid requirement was found, users should refer to PEP 508.\n """\n\n\nclass Requirement:\n """Parse a requirement.\n\n Parse a given requirement string into its parts, such as name, specifier,\n URL, and extras. Raises InvalidRequirement on a badly-formed requirement\n string.\n """\n\n # TODO: Can we test whether something is contained within a requirement?\n # If so how do we do that? Do we need to test against the _name_ of\n # the thing as well as the version? 
What about the markers?\n # TODO: Can we normalize the name and extra name?\n\n def __init__(self, requirement_string: str) -> None:\n try:\n parsed = _parse_requirement(requirement_string)\n except ParserSyntaxError as e:\n raise InvalidRequirement(str(e)) from e\n\n self.name: str = parsed.name\n self.url: str | None = parsed.url or None\n self.extras: set[str] = set(parsed.extras or [])\n self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)\n self.marker: Marker | None = None\n if parsed.marker is not None:\n self.marker = Marker.__new__(Marker)\n self.marker._markers = _normalize_extra_values(parsed.marker)\n\n def _iter_parts(self, name: str) -> Iterator[str]:\n yield name\n\n if self.extras:\n formatted_extras = ",".join(sorted(self.extras))\n yield f"[{formatted_extras}]"\n\n if self.specifier:\n yield str(self.specifier)\n\n if self.url:\n yield f"@ {self.url}"\n if self.marker:\n yield " "\n\n if self.marker:\n yield f"; {self.marker}"\n\n def __str__(self) -> str:\n return "".join(self._iter_parts(self.name))\n\n def __repr__(self) -> str:\n return f"<Requirement('{self}')>"\n\n def __hash__(self) -> int:\n return hash(\n (\n self.__class__.__name__,\n *self._iter_parts(canonicalize_name(self.name)),\n )\n )\n\n def __eq__(self, other: Any) -> bool:\n if not isinstance(other, Requirement):\n return NotImplemented\n\n return (\n canonicalize_name(self.name) == canonicalize_name(other.name)\n and self.extras == other.extras\n and self.specifier == other.specifier\n and self.url == other.url\n and self.marker == other.marker\n )\n
.venv\Lib\site-packages\packaging\requirements.py
requirements.py
Python
2,947
0.95
0.186813
0.112676
python-kit
765
2024-03-17T05:41:04.065873
GPL-3.0
false
2fc711cf5b4a1a8ac92aab0bd4e13284
# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n"""\n.. testsetup::\n\n from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier\n from packaging.version import Version\n"""\n\nfrom __future__ import annotations\n\nimport abc\nimport itertools\nimport re\nfrom typing import Callable, Iterable, Iterator, TypeVar, Union\n\nfrom .utils import canonicalize_version\nfrom .version import Version\n\nUnparsedVersion = Union[Version, str]\nUnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)\nCallableOperator = Callable[[Version, str], bool]\n\n\ndef _coerce_version(version: UnparsedVersion) -> Version:\n if not isinstance(version, Version):\n version = Version(version)\n return version\n\n\nclass InvalidSpecifier(ValueError):\n """\n Raised when attempting to create a :class:`Specifier` with a specifier\n string that is invalid.\n\n >>> Specifier("lolwat")\n Traceback (most recent call last):\n ...\n packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'\n """\n\n\nclass BaseSpecifier(metaclass=abc.ABCMeta):\n @abc.abstractmethod\n def __str__(self) -> str:\n """\n Returns the str representation of this Specifier-like object. 
This\n should be representative of the Specifier itself.\n """\n\n @abc.abstractmethod\n def __hash__(self) -> int:\n """\n Returns a hash value for this Specifier-like object.\n """\n\n @abc.abstractmethod\n def __eq__(self, other: object) -> bool:\n """\n Returns a boolean representing whether or not the two Specifier-like\n objects are equal.\n\n :param other: The other object to check against.\n """\n\n @property\n @abc.abstractmethod\n def prereleases(self) -> bool | None:\n """Whether or not pre-releases as a whole are allowed.\n\n This can be set to either ``True`` or ``False`` to explicitly enable or disable\n prereleases or it can be set to ``None`` (the default) to use default semantics.\n """\n\n @prereleases.setter\n def prereleases(self, value: bool) -> None:\n """Setter for :attr:`prereleases`.\n\n :param value: The value to set.\n """\n\n @abc.abstractmethod\n def contains(self, item: str, prereleases: bool | None = None) -> bool:\n """\n Determines if the given item is contained within this specifier.\n """\n\n @abc.abstractmethod\n def filter(\n self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None\n ) -> Iterator[UnparsedVersionVar]:\n """\n Takes an iterable of items and filters them so that only items which\n are contained within this specifier are allowed in it.\n """\n\n\nclass Specifier(BaseSpecifier):\n """This class abstracts handling of version specifiers.\n\n .. tip::\n\n It is generally not required to instantiate this manually. 
You should instead\n prefer to work with :class:`SpecifierSet` instead, which can parse\n comma-separated version specifiers (which is what package metadata contains).\n """\n\n _operator_regex_str = r"""\n (?P<operator>(~=|==|!=|<=|>=|<|>|===))\n """\n _version_regex_str = r"""\n (?P<version>\n (?:\n # The identity operators allow for an escape hatch that will\n # do an exact string match of the version you wish to install.\n # This will not be parsed by PEP 440 and we cannot determine\n # any semantic meaning from it. This operator is discouraged\n # but included entirely as an escape hatch.\n (?<====) # Only match for the identity operator\n \s*\n [^\s;)]* # The arbitrary version can be just about anything,\n # we match everything except for whitespace, a\n # semi-colon for marker support, and a closing paren\n # since versions can be enclosed in them.\n )\n |\n (?:\n # The (non)equality operators allow for wild card and local\n # versions to be specified so we have to define these two\n # operators separately to enable that.\n (?<===|!=) # Only match for equals and not equals\n\n \s*\n v?\n (?:[0-9]+!)? # epoch\n [0-9]+(?:\.[0-9]+)* # release\n\n # You cannot use a wild card and a pre-release, post-release, a dev or\n # local version together so group them with a | and make them optional.\n (?:\n \.\* # Wild card syntax of .*\n |\n (?: # pre release\n [-_\.]?\n (alpha|beta|preview|pre|a|b|c|rc)\n [-_\.]?\n [0-9]*\n )?\n (?: # post release\n (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)\n )?\n (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release\n (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local\n )?\n )\n |\n (?:\n # The compatible operator requires at least two digits in the\n # release segment.\n (?<=~=) # Only match for the compatible operator\n\n \s*\n v?\n (?:[0-9]+!)? 
# epoch\n [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)\n (?: # pre release\n [-_\.]?\n (alpha|beta|preview|pre|a|b|c|rc)\n [-_\.]?\n [0-9]*\n )?\n (?: # post release\n (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)\n )?\n (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release\n )\n |\n (?:\n # All other operators only allow a sub set of what the\n # (non)equality operators do. Specifically they do not allow\n # local versions to be specified nor do they allow the prefix\n # matching wild cards.\n (?<!==|!=|~=) # We have special cases for these\n # operators so we want to make sure they\n # don't match here.\n\n \s*\n v?\n (?:[0-9]+!)? # epoch\n [0-9]+(?:\.[0-9]+)* # release\n (?: # pre release\n [-_\.]?\n (alpha|beta|preview|pre|a|b|c|rc)\n [-_\.]?\n [0-9]*\n )?\n (?: # post release\n (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)\n )?\n (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release\n )\n )\n """\n\n _regex = re.compile(\n r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",\n re.VERBOSE | re.IGNORECASE,\n )\n\n _operators = {\n "~=": "compatible",\n "==": "equal",\n "!=": "not_equal",\n "<=": "less_than_equal",\n ">=": "greater_than_equal",\n "<": "less_than",\n ">": "greater_than",\n "===": "arbitrary",\n }\n\n def __init__(self, spec: str = "", prereleases: bool | None = None) -> None:\n """Initialize a Specifier instance.\n\n :param spec:\n The string representation of a specifier which will be parsed and\n normalized before use.\n :param prereleases:\n This tells the specifier if it should accept prerelease versions if\n applicable or not. The default of ``None`` will autodetect it from the\n given specifiers.\n :raises InvalidSpecifier:\n If the given specifier is invalid (i.e. 
bad syntax).\n """\n match = self._regex.search(spec)\n if not match:\n raise InvalidSpecifier(f"Invalid specifier: {spec!r}")\n\n self._spec: tuple[str, str] = (\n match.group("operator").strip(),\n match.group("version").strip(),\n )\n\n # Store whether or not this Specifier should accept prereleases\n self._prereleases = prereleases\n\n # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515\n @property # type: ignore[override]\n def prereleases(self) -> bool:\n # If there is an explicit prereleases set for this, then we'll just\n # blindly use that.\n if self._prereleases is not None:\n return self._prereleases\n\n # Look at all of our specifiers and determine if they are inclusive\n # operators, and if they are if they are including an explicit\n # prerelease.\n operator, version = self._spec\n if operator in ["==", ">=", "<=", "~=", "===", ">", "<"]:\n # The == specifier can include a trailing .*, if it does we\n # want to remove before parsing.\n if operator == "==" and version.endswith(".*"):\n version = version[:-2]\n\n # Parse the version, and if it is a pre-release than this\n # specifier allows pre-releases.\n if Version(version).is_prerelease:\n return True\n\n return False\n\n @prereleases.setter\n def prereleases(self, value: bool) -> None:\n self._prereleases = value\n\n @property\n def operator(self) -> str:\n """The operator of this specifier.\n\n >>> Specifier("==1.2.3").operator\n '=='\n """\n return self._spec[0]\n\n @property\n def version(self) -> str:\n """The version of this specifier.\n\n >>> Specifier("==1.2.3").version\n '1.2.3'\n """\n return self._spec[1]\n\n def __repr__(self) -> str:\n """A representation of the Specifier that shows all internal state.\n\n >>> Specifier('>=1.0.0')\n <Specifier('>=1.0.0')>\n >>> Specifier('>=1.0.0', prereleases=False)\n <Specifier('>=1.0.0', prereleases=False)>\n >>> Specifier('>=1.0.0', prereleases=True)\n <Specifier('>=1.0.0', prereleases=True)>\n """\n pre = (\n f", 
prereleases={self.prereleases!r}"\n if self._prereleases is not None\n else ""\n )\n\n return f"<{self.__class__.__name__}({str(self)!r}{pre})>"\n\n def __str__(self) -> str:\n """A string representation of the Specifier that can be round-tripped.\n\n >>> str(Specifier('>=1.0.0'))\n '>=1.0.0'\n >>> str(Specifier('>=1.0.0', prereleases=False))\n '>=1.0.0'\n """\n return "{}{}".format(*self._spec)\n\n @property\n def _canonical_spec(self) -> tuple[str, str]:\n canonical_version = canonicalize_version(\n self._spec[1],\n strip_trailing_zero=(self._spec[0] != "~="),\n )\n return self._spec[0], canonical_version\n\n def __hash__(self) -> int:\n return hash(self._canonical_spec)\n\n def __eq__(self, other: object) -> bool:\n """Whether or not the two Specifier-like objects are equal.\n\n :param other: The other object to check against.\n\n The value of :attr:`prereleases` is ignored.\n\n >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")\n True\n >>> (Specifier("==1.2.3", prereleases=False) ==\n ... Specifier("==1.2.3", prereleases=True))\n True\n >>> Specifier("==1.2.3") == "==1.2.3"\n True\n >>> Specifier("==1.2.3") == Specifier("==1.2.4")\n False\n >>> Specifier("==1.2.3") == Specifier("~=1.2.3")\n False\n """\n if isinstance(other, str):\n try:\n other = self.__class__(str(other))\n except InvalidSpecifier:\n return NotImplemented\n elif not isinstance(other, self.__class__):\n return NotImplemented\n\n return self._canonical_spec == other._canonical_spec\n\n def _get_operator(self, op: str) -> CallableOperator:\n operator_callable: CallableOperator = getattr(\n self, f"_compare_{self._operators[op]}"\n )\n return operator_callable\n\n def _compare_compatible(self, prospective: Version, spec: str) -> bool:\n # Compatible releases have an equivalent combination of >= and ==. That\n # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to\n # implement this in terms of the other specifiers instead of\n # implementing it ourselves. 
The only thing we need to do is construct\n # the other specifiers.\n\n # We want everything but the last item in the version, but we want to\n # ignore suffix segments.\n prefix = _version_join(\n list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]\n )\n\n # Add the prefix notation to the end of our string\n prefix += ".*"\n\n return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(\n prospective, prefix\n )\n\n def _compare_equal(self, prospective: Version, spec: str) -> bool:\n # We need special logic to handle prefix matching\n if spec.endswith(".*"):\n # In the case of prefix matching we want to ignore local segment.\n normalized_prospective = canonicalize_version(\n prospective.public, strip_trailing_zero=False\n )\n # Get the normalized version string ignoring the trailing .*\n normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)\n # Split the spec out by bangs and dots, and pretend that there is\n # an implicit dot in between a release segment and a pre-release segment.\n split_spec = _version_split(normalized_spec)\n\n # Split the prospective version out by bangs and dots, and pretend\n # that there is an implicit dot in between a release segment and\n # a pre-release segment.\n split_prospective = _version_split(normalized_prospective)\n\n # 0-pad the prospective version before shortening it to get the correct\n # shortened version.\n padded_prospective, _ = _pad_version(split_prospective, split_spec)\n\n # Shorten the prospective version to be the same length as the spec\n # so that we can determine if the specifier is a prefix of the\n # prospective version or not.\n shortened_prospective = padded_prospective[: len(split_spec)]\n\n return shortened_prospective == split_spec\n else:\n # Convert our spec string into a Version\n spec_version = Version(spec)\n\n # If the specifier does not have a local segment, then we want to\n # act as if the prospective version also does not have a local\n 
# segment.\n if not spec_version.local:\n prospective = Version(prospective.public)\n\n return prospective == spec_version\n\n def _compare_not_equal(self, prospective: Version, spec: str) -> bool:\n return not self._compare_equal(prospective, spec)\n\n def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:\n # NB: Local version identifiers are NOT permitted in the version\n # specifier, so local version labels can be universally removed from\n # the prospective version.\n return Version(prospective.public) <= Version(spec)\n\n def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:\n # NB: Local version identifiers are NOT permitted in the version\n # specifier, so local version labels can be universally removed from\n # the prospective version.\n return Version(prospective.public) >= Version(spec)\n\n def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:\n # Convert our spec to a Version instance, since we'll want to work with\n # it as a version.\n spec = Version(spec_str)\n\n # Check to see if the prospective version is less than the spec\n # version. If it's not we can short circuit and just return False now\n # instead of doing extra unneeded work.\n if not prospective < spec:\n return False\n\n # This special case is here so that, unless the specifier itself\n # includes is a pre-release version, that we do not accept pre-release\n # versions for the version mentioned in the specifier (e.g. 
<3.1 should\n # not match 3.1.dev0, but should match 3.0.dev0).\n if not spec.is_prerelease and prospective.is_prerelease:\n if Version(prospective.base_version) == Version(spec.base_version):\n return False\n\n # If we've gotten to here, it means that prospective version is both\n # less than the spec version *and* it's not a pre-release of the same\n # version in the spec.\n return True\n\n def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:\n # Convert our spec to a Version instance, since we'll want to work with\n # it as a version.\n spec = Version(spec_str)\n\n # Check to see if the prospective version is greater than the spec\n # version. If it's not we can short circuit and just return False now\n # instead of doing extra unneeded work.\n if not prospective > spec:\n return False\n\n # This special case is here so that, unless the specifier itself\n # includes is a post-release version, that we do not accept\n # post-release versions for the version mentioned in the specifier\n # (e.g. 
>3.1 should not match 3.0.post0, but should match 3.2.post0).\n if not spec.is_postrelease and prospective.is_postrelease:\n if Version(prospective.base_version) == Version(spec.base_version):\n return False\n\n # Ensure that we do not allow a local version of the version mentioned\n # in the specifier, which is technically greater than, to match.\n if prospective.local is not None:\n if Version(prospective.base_version) == Version(spec.base_version):\n return False\n\n # If we've gotten to here, it means that prospective version is both\n # greater than the spec version *and* it's not a pre-release of the\n # same version in the spec.\n return True\n\n def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:\n return str(prospective).lower() == str(spec).lower()\n\n def __contains__(self, item: str | Version) -> bool:\n """Return whether or not the item is contained in this specifier.\n\n :param item: The item to check for.\n\n This is used for the ``in`` operator and behaves the same as\n :meth:`contains` with no ``prereleases`` argument passed.\n\n >>> "1.2.3" in Specifier(">=1.2.3")\n True\n >>> Version("1.2.3") in Specifier(">=1.2.3")\n True\n >>> "1.0.0" in Specifier(">=1.2.3")\n False\n >>> "1.3.0a1" in Specifier(">=1.2.3")\n False\n >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)\n True\n """\n return self.contains(item)\n\n def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool:\n """Return whether or not the item is contained in this specifier.\n\n :param item:\n The item to check for, which can be a version string or a\n :class:`Version` instance.\n :param prereleases:\n Whether or not to match prereleases with this Specifier. 
If set to\n ``None`` (the default), it uses :attr:`prereleases` to determine\n whether or not prereleases are allowed.\n\n >>> Specifier(">=1.2.3").contains("1.2.3")\n True\n >>> Specifier(">=1.2.3").contains(Version("1.2.3"))\n True\n >>> Specifier(">=1.2.3").contains("1.0.0")\n False\n >>> Specifier(">=1.2.3").contains("1.3.0a1")\n False\n >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")\n True\n >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)\n True\n """\n\n # Determine if prereleases are to be allowed or not.\n if prereleases is None:\n prereleases = self.prereleases\n\n # Normalize item to a Version, this allows us to have a shortcut for\n # "2.0" in Specifier(">=2")\n normalized_item = _coerce_version(item)\n\n # Determine if we should be supporting prereleases in this specifier\n # or not, if we do not support prereleases than we can short circuit\n # logic if this version is a prereleases.\n if normalized_item.is_prerelease and not prereleases:\n return False\n\n # Actually do the comparison to determine if this item is contained\n # within this Specifier or not.\n operator_callable: CallableOperator = self._get_operator(self.operator)\n return operator_callable(normalized_item, self.version)\n\n def filter(\n self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None\n ) -> Iterator[UnparsedVersionVar]:\n """Filter items in the given iterable, that match the specifier.\n\n :param iterable:\n An iterable that can contain version strings and :class:`Version` instances.\n The items in the iterable will be filtered according to the specifier.\n :param prereleases:\n Whether or not to allow prereleases in the returned iterator. 
If set to\n ``None`` (the default), it will be intelligently decide whether to allow\n prereleases or not (based on the :attr:`prereleases` attribute, and\n whether the only versions matching are prereleases).\n\n This method is smarter than just ``filter(Specifier().contains, [...])``\n because it implements the rule from :pep:`440` that a prerelease item\n SHOULD be accepted if no other versions match the given specifier.\n\n >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))\n ['1.3']\n >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))\n ['1.2.3', '1.3', <Version('1.4')>]\n >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))\n ['1.5a1']\n >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))\n ['1.3', '1.5a1']\n >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))\n ['1.3', '1.5a1']\n """\n\n yielded = False\n found_prereleases = []\n\n kw = {"prereleases": prereleases if prereleases is not None else True}\n\n # Attempt to iterate over all the values in the iterable and if any of\n # them match, yield them.\n for version in iterable:\n parsed_version = _coerce_version(version)\n\n if self.contains(parsed_version, **kw):\n # If our version is a prerelease, and we were not set to allow\n # prereleases, then we'll store it for later in case nothing\n # else matches this specifier.\n if parsed_version.is_prerelease and not (\n prereleases or self.prereleases\n ):\n found_prereleases.append(version)\n # Either this is not a prerelease, or we should have been\n # accepting prereleases from the beginning.\n else:\n yielded = True\n yield version\n\n # Now that we've iterated over everything, determine if we've yielded\n # any values, and if we have not and we have any prereleases stored up\n # then we will go ahead and yield the prereleases.\n if not yielded and found_prereleases:\n for version in found_prereleases:\n yield version\n\n\n_prefix_regex = 
re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")\n\n\ndef _version_split(version: str) -> list[str]:\n """Split version into components.\n\n The split components are intended for version comparison. The logic does\n not attempt to retain the original version string, so joining the\n components back with :func:`_version_join` may not produce the original\n version string.\n """\n result: list[str] = []\n\n epoch, _, rest = version.rpartition("!")\n result.append(epoch or "0")\n\n for item in rest.split("."):\n match = _prefix_regex.search(item)\n if match:\n result.extend(match.groups())\n else:\n result.append(item)\n return result\n\n\ndef _version_join(components: list[str]) -> str:\n """Join split version components into a version string.\n\n This function assumes the input came from :func:`_version_split`, where the\n first component must be the epoch (either empty or numeric), and all other\n components numeric.\n """\n epoch, *rest = components\n return f"{epoch}!{'.'.join(rest)}"\n\n\ndef _is_not_suffix(segment: str) -> bool:\n return not any(\n segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")\n )\n\n\ndef _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]:\n left_split, right_split = [], []\n\n # Get the release segment of our versions\n left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))\n right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))\n\n # Get the rest of our versions\n left_split.append(left[len(left_split[0]) :])\n right_split.append(right[len(right_split[0]) :])\n\n # Insert our padding\n left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))\n right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))\n\n return (\n list(itertools.chain.from_iterable(left_split)),\n list(itertools.chain.from_iterable(right_split)),\n )\n\n\nclass SpecifierSet(BaseSpecifier):\n """This class abstracts handling of a set of 
version specifiers.\n\n It can be passed a single specifier (``>=3.0``), a comma-separated list of\n specifiers (``>=3.0,!=3.1``), or no specifier at all.\n """\n\n def __init__(\n self,\n specifiers: str | Iterable[Specifier] = "",\n prereleases: bool | None = None,\n ) -> None:\n """Initialize a SpecifierSet instance.\n\n :param specifiers:\n The string representation of a specifier or a comma-separated list of\n specifiers which will be parsed and normalized before use.\n May also be an iterable of ``Specifier`` instances, which will be used\n as is.\n :param prereleases:\n This tells the SpecifierSet if it should accept prerelease versions if\n applicable or not. The default of ``None`` will autodetect it from the\n given specifiers.\n\n :raises InvalidSpecifier:\n If the given ``specifiers`` are not parseable than this exception will be\n raised.\n """\n\n if isinstance(specifiers, str):\n # Split on `,` to break each individual specifier into its own item, and\n # strip each item to remove leading/trailing whitespace.\n split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]\n\n # Make each individual specifier a Specifier and save in a frozen set\n # for later.\n self._specs = frozenset(map(Specifier, split_specifiers))\n else:\n # Save the supplied specifiers in a frozen set.\n self._specs = frozenset(specifiers)\n\n # Store our prereleases value so we can use it later to determine if\n # we accept prereleases or not.\n self._prereleases = prereleases\n\n @property\n def prereleases(self) -> bool | None:\n # If we have been given an explicit prerelease modifier, then we'll\n # pass that through here.\n if self._prereleases is not None:\n return self._prereleases\n\n # If we don't have any specifiers, and we don't have a forced value,\n # then we'll just return None since we don't know if this should have\n # pre-releases or not.\n if not self._specs:\n return None\n\n # Otherwise we'll see if any of the given specifiers accept\n # 
prereleases, if any of them do we'll return True, otherwise False.\n return any(s.prereleases for s in self._specs)\n\n @prereleases.setter\n def prereleases(self, value: bool) -> None:\n self._prereleases = value\n\n def __repr__(self) -> str:\n """A representation of the specifier set that shows all internal state.\n\n Note that the ordering of the individual specifiers within the set may not\n match the input string.\n\n >>> SpecifierSet('>=1.0.0,!=2.0.0')\n <SpecifierSet('!=2.0.0,>=1.0.0')>\n >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)\n <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>\n >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)\n <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>\n """\n pre = (\n f", prereleases={self.prereleases!r}"\n if self._prereleases is not None\n else ""\n )\n\n return f"<SpecifierSet({str(self)!r}{pre})>"\n\n def __str__(self) -> str:\n """A string representation of the specifier set that can be round-tripped.\n\n Note that the ordering of the individual specifiers within the set may not\n match the input string.\n\n >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))\n '!=1.0.1,>=1.0.0'\n >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))\n '!=1.0.1,>=1.0.0'\n """\n return ",".join(sorted(str(s) for s in self._specs))\n\n def __hash__(self) -> int:\n return hash(self._specs)\n\n def __and__(self, other: SpecifierSet | str) -> SpecifierSet:\n """Return a SpecifierSet which is a combination of the two sets.\n\n :param other: The other object to combine with.\n\n >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'\n <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>\n >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')\n <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>\n """\n if isinstance(other, str):\n other = SpecifierSet(other)\n elif not isinstance(other, SpecifierSet):\n return NotImplemented\n\n specifier = SpecifierSet()\n specifier._specs = frozenset(self._specs | 
other._specs)\n\n if self._prereleases is None and other._prereleases is not None:\n specifier._prereleases = other._prereleases\n elif self._prereleases is not None and other._prereleases is None:\n specifier._prereleases = self._prereleases\n elif self._prereleases == other._prereleases:\n specifier._prereleases = self._prereleases\n else:\n raise ValueError(\n "Cannot combine SpecifierSets with True and False prerelease overrides."\n )\n\n return specifier\n\n def __eq__(self, other: object) -> bool:\n """Whether or not the two SpecifierSet-like objects are equal.\n\n :param other: The other object to check against.\n\n The value of :attr:`prereleases` is ignored.\n\n >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")\n True\n >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==\n ... SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))\n True\n >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"\n True\n >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")\n False\n >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")\n False\n """\n if isinstance(other, (str, Specifier)):\n other = SpecifierSet(str(other))\n elif not isinstance(other, SpecifierSet):\n return NotImplemented\n\n return self._specs == other._specs\n\n def __len__(self) -> int:\n """Returns the number of specifiers in this specifier set."""\n return len(self._specs)\n\n def __iter__(self) -> Iterator[Specifier]:\n """\n Returns an iterator over all the underlying :class:`Specifier` instances\n in this specifier set.\n\n >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)\n [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]\n """\n return iter(self._specs)\n\n def __contains__(self, item: UnparsedVersion) -> bool:\n """Return whether or not the item is contained in this specifier.\n\n :param item: The item to check for.\n\n This is used for the ``in`` operator and behaves the same as\n :meth:`contains` with no ``prereleases`` argument 
passed.\n\n >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")\n True\n >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")\n True\n >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")\n False\n >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")\n False\n >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)\n True\n """\n return self.contains(item)\n\n def contains(\n self,\n item: UnparsedVersion,\n prereleases: bool | None = None,\n installed: bool | None = None,\n ) -> bool:\n """Return whether or not the item is contained in this SpecifierSet.\n\n :param item:\n The item to check for, which can be a version string or a\n :class:`Version` instance.\n :param prereleases:\n Whether or not to match prereleases with this SpecifierSet. If set to\n ``None`` (the default), it uses :attr:`prereleases` to determine\n whether or not prereleases are allowed.\n\n >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")\n True\n >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))\n True\n >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")\n False\n >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")\n False\n >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")\n True\n >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)\n True\n """\n # Ensure that our item is a Version instance.\n if not isinstance(item, Version):\n item = Version(item)\n\n # Determine if we're forcing a prerelease or not, if we're not forcing\n # one for this particular filter call, then we'll use whatever the\n # SpecifierSet thinks for whether or not we should support prereleases.\n if prereleases is None:\n prereleases = self.prereleases\n\n # We can determine if we're going to allow pre-releases by looking to\n # see if any of the underlying items supports them. 
If none of them do\n # and this item is a pre-release then we do not allow it and we can\n # short circuit that here.\n # Note: This means that 1.0.dev1 would not be contained in something\n # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0\n if not prereleases and item.is_prerelease:\n return False\n\n if installed and item.is_prerelease:\n item = Version(item.base_version)\n\n # We simply dispatch to the underlying specs here to make sure that the\n # given version is contained within all of them.\n # Note: This use of all() here means that an empty set of specifiers\n # will always return True, this is an explicit design decision.\n return all(s.contains(item, prereleases=prereleases) for s in self._specs)\n\n def filter(\n self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None\n ) -> Iterator[UnparsedVersionVar]:\n """Filter items in the given iterable, that match the specifiers in this set.\n\n :param iterable:\n An iterable that can contain version strings and :class:`Version` instances.\n The items in the iterable will be filtered according to the specifier.\n :param prereleases:\n Whether or not to allow prereleases in the returned iterator. 
If set to\n ``None`` (the default), it will be intelligently decide whether to allow\n prereleases or not (based on the :attr:`prereleases` attribute, and\n whether the only versions matching are prereleases).\n\n This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``\n because it implements the rule from :pep:`440` that a prerelease item\n SHOULD be accepted if no other versions match the given specifier.\n\n >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))\n ['1.3']\n >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))\n ['1.3', <Version('1.4')>]\n >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))\n []\n >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))\n ['1.3', '1.5a1']\n >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))\n ['1.3', '1.5a1']\n\n An "empty" SpecifierSet will filter items based on the presence of prerelease\n versions in the set.\n\n >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))\n ['1.3']\n >>> list(SpecifierSet("").filter(["1.5a1"]))\n ['1.5a1']\n >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))\n ['1.3', '1.5a1']\n >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))\n ['1.3', '1.5a1']\n """\n # Determine if we're forcing a prerelease or not, if we're not forcing\n # one for this particular filter call, then we'll use whatever the\n # SpecifierSet thinks for whether or not we should support prereleases.\n if prereleases is None:\n prereleases = self.prereleases\n\n # If we have any specifiers, then we want to wrap our iterable in the\n # filter method for each one, this will act as a logical AND amongst\n # each specifier.\n if self._specs:\n for spec in self._specs:\n iterable = spec.filter(iterable, prereleases=bool(prereleases))\n return iter(iterable)\n # If we do not have any specifiers, then we need to have a rough filter\n # which will filter out any pre-releases, unless 
there are no final\n # releases.\n else:\n filtered: list[UnparsedVersionVar] = []\n found_prereleases: list[UnparsedVersionVar] = []\n\n for item in iterable:\n parsed_version = _coerce_version(item)\n\n # Store any item which is a pre-release for later unless we've\n # already found a final version or we are accepting prereleases\n if parsed_version.is_prerelease and not prereleases:\n if not filtered:\n found_prereleases.append(item)\n else:\n filtered.append(item)\n\n # If we've found no items except for pre-releases, then we'll go\n # ahead and use the pre-releases\n if not filtered and found_prereleases and prereleases is None:\n return iter(found_prereleases)\n\n return iter(filtered)\n
.venv\Lib\site-packages\packaging\specifiers.py
specifiers.py
Python
40,055
0.95
0.175662
0.182898
react-lib
978
2023-09-20T11:00:21.265851
GPL-3.0
false
5e70fd47551983ebba03505f168e3b12
# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import annotations\n\nimport logging\nimport platform\nimport re\nimport struct\nimport subprocess\nimport sys\nimport sysconfig\nfrom importlib.machinery import EXTENSION_SUFFIXES\nfrom typing import (\n Iterable,\n Iterator,\n Sequence,\n Tuple,\n cast,\n)\n\nfrom . import _manylinux, _musllinux\n\nlogger = logging.getLogger(__name__)\n\nPythonVersion = Sequence[int]\nAppleVersion = Tuple[int, int]\n\nINTERPRETER_SHORT_NAMES: dict[str, str] = {\n "python": "py", # Generic.\n "cpython": "cp",\n "pypy": "pp",\n "ironpython": "ip",\n "jython": "jy",\n}\n\n\n_32_BIT_INTERPRETER = struct.calcsize("P") == 4\n\n\nclass Tag:\n """\n A representation of the tag triple for a wheel.\n\n Instances are considered immutable and thus are hashable. Equality checking\n is also supported.\n """\n\n __slots__ = ["_abi", "_hash", "_interpreter", "_platform"]\n\n def __init__(self, interpreter: str, abi: str, platform: str) -> None:\n self._interpreter = interpreter.lower()\n self._abi = abi.lower()\n self._platform = platform.lower()\n # The __hash__ of every single element in a Set[Tag] will be evaluated each time\n # that a set calls its `.disjoint()` method, which may be called hundreds of\n # times when scanning a page of links for packages with tags matching that\n # Set[Tag]. 
Pre-computing the value here produces significant speedups for\n # downstream consumers.\n self._hash = hash((self._interpreter, self._abi, self._platform))\n\n @property\n def interpreter(self) -> str:\n return self._interpreter\n\n @property\n def abi(self) -> str:\n return self._abi\n\n @property\n def platform(self) -> str:\n return self._platform\n\n def __eq__(self, other: object) -> bool:\n if not isinstance(other, Tag):\n return NotImplemented\n\n return (\n (self._hash == other._hash) # Short-circuit ASAP for perf reasons.\n and (self._platform == other._platform)\n and (self._abi == other._abi)\n and (self._interpreter == other._interpreter)\n )\n\n def __hash__(self) -> int:\n return self._hash\n\n def __str__(self) -> str:\n return f"{self._interpreter}-{self._abi}-{self._platform}"\n\n def __repr__(self) -> str:\n return f"<{self} @ {id(self)}>"\n\n\ndef parse_tag(tag: str) -> frozenset[Tag]:\n """\n Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.\n\n Returning a set is required due to the possibility that the tag is a\n compressed tag set.\n """\n tags = set()\n interpreters, abis, platforms = tag.split("-")\n for interpreter in interpreters.split("."):\n for abi in abis.split("."):\n for platform_ in platforms.split("."):\n tags.add(Tag(interpreter, abi, platform_))\n return frozenset(tags)\n\n\ndef _get_config_var(name: str, warn: bool = False) -> int | str | None:\n value: int | str | None = sysconfig.get_config_var(name)\n if value is None and warn:\n logger.debug(\n "Config variable '%s' is unset, Python ABI tag may be incorrect", name\n )\n return value\n\n\ndef _normalize_string(string: str) -> str:\n return string.replace(".", "_").replace("-", "_").replace(" ", "_")\n\n\ndef _is_threaded_cpython(abis: list[str]) -> bool:\n """\n Determine if the ABI corresponds to a threaded (`--disable-gil`) build.\n\n The threaded builds are indicated by a "t" in the abiflags.\n """\n if len(abis) == 0:\n return False\n # 
expect e.g., cp313\n m = re.match(r"cp\d+(.*)", abis[0])\n if not m:\n return False\n abiflags = m.group(1)\n return "t" in abiflags\n\n\ndef _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:\n """\n Determine if the Python version supports abi3.\n\n PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)\n builds do not support abi3.\n """\n return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading\n\n\ndef _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]:\n py_version = tuple(py_version) # To allow for version comparison.\n abis = []\n version = _version_nodot(py_version[:2])\n threading = debug = pymalloc = ucs4 = ""\n with_debug = _get_config_var("Py_DEBUG", warn)\n has_refcount = hasattr(sys, "gettotalrefcount")\n # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled\n # extension modules is the best option.\n # https://github.com/pypa/pip/issues/3383#issuecomment-173267692\n has_ext = "_d.pyd" in EXTENSION_SUFFIXES\n if with_debug or (with_debug is None and (has_refcount or has_ext)):\n debug = "d"\n if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):\n threading = "t"\n if py_version < (3, 8):\n with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)\n if with_pymalloc or with_pymalloc is None:\n pymalloc = "m"\n if py_version < (3, 3):\n unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)\n if unicode_size == 4 or (\n unicode_size is None and sys.maxunicode == 0x10FFFF\n ):\n ucs4 = "u"\n elif debug:\n # Debug builds can also load "normal" extension modules.\n # We can also assume no UCS-4 or pymalloc requirement.\n abis.append(f"cp{version}{threading}")\n abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")\n return abis\n\n\ndef cpython_tags(\n python_version: PythonVersion | None = None,\n abis: Iterable[str] | None = None,\n platforms: Iterable[str] | None = None,\n *,\n warn: bool = False,\n) -> 
Iterator[Tag]:\n """\n Yields the tags for a CPython interpreter.\n\n The tags consist of:\n - cp<python_version>-<abi>-<platform>\n - cp<python_version>-abi3-<platform>\n - cp<python_version>-none-<platform>\n - cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2.\n\n If python_version only specifies a major version then user-provided ABIs and\n the 'none' ABItag will be used.\n\n If 'abi3' or 'none' are specified in 'abis' then they will be yielded at\n their normal position and not at the beginning.\n """\n if not python_version:\n python_version = sys.version_info[:2]\n\n interpreter = f"cp{_version_nodot(python_version[:2])}"\n\n if abis is None:\n if len(python_version) > 1:\n abis = _cpython_abis(python_version, warn)\n else:\n abis = []\n abis = list(abis)\n # 'abi3' and 'none' are explicitly handled later.\n for explicit_abi in ("abi3", "none"):\n try:\n abis.remove(explicit_abi)\n except ValueError:\n pass\n\n platforms = list(platforms or platform_tags())\n for abi in abis:\n for platform_ in platforms:\n yield Tag(interpreter, abi, platform_)\n\n threading = _is_threaded_cpython(abis)\n use_abi3 = _abi3_applies(python_version, threading)\n if use_abi3:\n yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)\n yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)\n\n if use_abi3:\n for minor_version in range(python_version[1] - 1, 1, -1):\n for platform_ in platforms:\n version = _version_nodot((python_version[0], minor_version))\n interpreter = f"cp{version}"\n yield Tag(interpreter, "abi3", platform_)\n\n\ndef _generic_abi() -> list[str]:\n """\n Return the ABI tag based on EXT_SUFFIX.\n """\n # The following are examples of `EXT_SUFFIX`.\n # We want to keep the parts which are related to the ABI and remove the\n # parts which are related to the platform:\n # - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310\n # - mac: '.cpython-310-darwin.so' => cp310\n # - win: 
'.cp310-win_amd64.pyd' => cp310\n # - win: '.pyd' => cp37 (uses _cpython_abis())\n # - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73\n # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'\n # => graalpy_38_native\n\n ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)\n if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":\n raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")\n parts = ext_suffix.split(".")\n if len(parts) < 3:\n # CPython3.7 and earlier uses ".pyd" on Windows.\n return _cpython_abis(sys.version_info[:2])\n soabi = parts[1]\n if soabi.startswith("cpython"):\n # non-windows\n abi = "cp" + soabi.split("-")[1]\n elif soabi.startswith("cp"):\n # windows\n abi = soabi.split("-")[0]\n elif soabi.startswith("pypy"):\n abi = "-".join(soabi.split("-")[:2])\n elif soabi.startswith("graalpy"):\n abi = "-".join(soabi.split("-")[:3])\n elif soabi:\n # pyston, ironpython, others?\n abi = soabi\n else:\n return []\n return [_normalize_string(abi)]\n\n\ndef generic_tags(\n interpreter: str | None = None,\n abis: Iterable[str] | None = None,\n platforms: Iterable[str] | None = None,\n *,\n warn: bool = False,\n) -> Iterator[Tag]:\n """\n Yields the tags for a generic interpreter.\n\n The tags consist of:\n - <interpreter>-<abi>-<platform>\n\n The "none" ABI will be added if it was not explicitly provided.\n """\n if not interpreter:\n interp_name = interpreter_name()\n interp_version = interpreter_version(warn=warn)\n interpreter = "".join([interp_name, interp_version])\n if abis is None:\n abis = _generic_abi()\n else:\n abis = list(abis)\n platforms = list(platforms or platform_tags())\n if "none" not in abis:\n abis.append("none")\n for abi in abis:\n for platform_ in platforms:\n yield Tag(interpreter, abi, platform_)\n\n\ndef _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:\n """\n Yields Python versions in descending order.\n\n After the latest version, the major-only version will be yielded, and then\n all 
previous versions of that major version.\n """\n if len(py_version) > 1:\n yield f"py{_version_nodot(py_version[:2])}"\n yield f"py{py_version[0]}"\n if len(py_version) > 1:\n for minor in range(py_version[1] - 1, -1, -1):\n yield f"py{_version_nodot((py_version[0], minor))}"\n\n\ndef compatible_tags(\n python_version: PythonVersion | None = None,\n interpreter: str | None = None,\n platforms: Iterable[str] | None = None,\n) -> Iterator[Tag]:\n """\n Yields the sequence of tags that are compatible with a specific version of Python.\n\n The tags consist of:\n - py*-none-<platform>\n - <interpreter>-none-any # ... if `interpreter` is provided.\n - py*-none-any\n """\n if not python_version:\n python_version = sys.version_info[:2]\n platforms = list(platforms or platform_tags())\n for version in _py_interpreter_range(python_version):\n for platform_ in platforms:\n yield Tag(version, "none", platform_)\n if interpreter:\n yield Tag(interpreter, "none", "any")\n for version in _py_interpreter_range(python_version):\n yield Tag(version, "none", "any")\n\n\ndef _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:\n if not is_32bit:\n return arch\n\n if arch.startswith("ppc"):\n return "ppc"\n\n return "i386"\n\n\ndef _mac_binary_formats(version: AppleVersion, cpu_arch: str) -> list[str]:\n formats = [cpu_arch]\n if cpu_arch == "x86_64":\n if version < (10, 4):\n return []\n formats.extend(["intel", "fat64", "fat32"])\n\n elif cpu_arch == "i386":\n if version < (10, 4):\n return []\n formats.extend(["intel", "fat32", "fat"])\n\n elif cpu_arch == "ppc64":\n # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?\n if version > (10, 5) or version < (10, 4):\n return []\n formats.append("fat64")\n\n elif cpu_arch == "ppc":\n if version > (10, 6):\n return []\n formats.extend(["fat32", "fat"])\n\n if cpu_arch in {"arm64", "x86_64"}:\n formats.append("universal2")\n\n if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:\n 
formats.append("universal")\n\n return formats\n\n\ndef mac_platforms(\n version: AppleVersion | None = None, arch: str | None = None\n) -> Iterator[str]:\n """\n Yields the platform tags for a macOS system.\n\n The `version` parameter is a two-item tuple specifying the macOS version to\n generate platform tags for. The `arch` parameter is the CPU architecture to\n generate platform tags for. Both parameters default to the appropriate value\n for the current system.\n """\n version_str, _, cpu_arch = platform.mac_ver()\n if version is None:\n version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2])))\n if version == (10, 16):\n # When built against an older macOS SDK, Python will report macOS 10.16\n # instead of the real version.\n version_str = subprocess.run(\n [\n sys.executable,\n "-sS",\n "-c",\n "import platform; print(platform.mac_ver()[0])",\n ],\n check=True,\n env={"SYSTEM_VERSION_COMPAT": "0"},\n stdout=subprocess.PIPE,\n text=True,\n ).stdout\n version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2])))\n else:\n version = version\n if arch is None:\n arch = _mac_arch(cpu_arch)\n else:\n arch = arch\n\n if (10, 0) <= version and version < (11, 0):\n # Prior to Mac OS 11, each yearly release of Mac OS bumped the\n # "minor" version number. The major version was always 10.\n major_version = 10\n for minor_version in range(version[1], -1, -1):\n compat_version = major_version, minor_version\n binary_formats = _mac_binary_formats(compat_version, arch)\n for binary_format in binary_formats:\n yield f"macosx_{major_version}_{minor_version}_{binary_format}"\n\n if version >= (11, 0):\n # Starting with Mac OS 11, each yearly release bumps the major version\n # number. 
The minor versions are now the midyear updates.\n minor_version = 0\n for major_version in range(version[0], 10, -1):\n compat_version = major_version, minor_version\n binary_formats = _mac_binary_formats(compat_version, arch)\n for binary_format in binary_formats:\n yield f"macosx_{major_version}_{minor_version}_{binary_format}"\n\n if version >= (11, 0):\n # Mac OS 11 on x86_64 is compatible with binaries from previous releases.\n # Arm64 support was introduced in 11.0, so no Arm binaries from previous\n # releases exist.\n #\n # However, the "universal2" binary format can have a\n # macOS version earlier than 11.0 when the x86_64 part of the binary supports\n # that version of macOS.\n major_version = 10\n if arch == "x86_64":\n for minor_version in range(16, 3, -1):\n compat_version = major_version, minor_version\n binary_formats = _mac_binary_formats(compat_version, arch)\n for binary_format in binary_formats:\n yield f"macosx_{major_version}_{minor_version}_{binary_format}"\n else:\n for minor_version in range(16, 3, -1):\n compat_version = major_version, minor_version\n binary_format = "universal2"\n yield f"macosx_{major_version}_{minor_version}_{binary_format}"\n\n\ndef ios_platforms(\n version: AppleVersion | None = None, multiarch: str | None = None\n) -> Iterator[str]:\n """\n Yields the platform tags for an iOS system.\n\n :param version: A two-item tuple specifying the iOS version to generate\n platform tags for. Defaults to the current iOS version.\n :param multiarch: The CPU architecture+ABI to generate platform tags for -\n (the value used by `sys.implementation._multiarch` e.g.,\n `arm64_iphoneos` or `x84_64_iphonesimulator`). Defaults to the current\n multiarch value.\n """\n if version is None:\n # if iOS is the current platform, ios_ver *must* be defined. 
However,\n # it won't exist for CPython versions before 3.13, which causes a mypy\n # error.\n _, release, _, _ = platform.ios_ver() # type: ignore[attr-defined, unused-ignore]\n version = cast("AppleVersion", tuple(map(int, release.split(".")[:2])))\n\n if multiarch is None:\n multiarch = sys.implementation._multiarch\n multiarch = multiarch.replace("-", "_")\n\n ios_platform_template = "ios_{major}_{minor}_{multiarch}"\n\n # Consider any iOS major.minor version from the version requested, down to\n # 12.0. 12.0 is the first iOS version that is known to have enough features\n # to support CPython. Consider every possible minor release up to X.9. There\n # highest the minor has ever gone is 8 (14.8 and 15.8) but having some extra\n # candidates that won't ever match doesn't really hurt, and it saves us from\n # having to keep an explicit list of known iOS versions in the code. Return\n # the results descending order of version number.\n\n # If the requested major version is less than 12, there won't be any matches.\n if version[0] < 12:\n return\n\n # Consider the actual X.Y version that was requested.\n yield ios_platform_template.format(\n major=version[0], minor=version[1], multiarch=multiarch\n )\n\n # Consider every minor version from X.0 to the minor version prior to the\n # version requested by the platform.\n for minor in range(version[1] - 1, -1, -1):\n yield ios_platform_template.format(\n major=version[0], minor=minor, multiarch=multiarch\n )\n\n for major in range(version[0] - 1, 11, -1):\n for minor in range(9, -1, -1):\n yield ios_platform_template.format(\n major=major, minor=minor, multiarch=multiarch\n )\n\n\ndef android_platforms(\n api_level: int | None = None, abi: str | None = None\n) -> Iterator[str]:\n """\n Yields the :attr:`~Tag.platform` tags for Android. 
If this function is invoked on\n non-Android platforms, the ``api_level`` and ``abi`` arguments are required.\n\n :param int api_level: The maximum `API level\n <https://developer.android.com/tools/releases/platforms>`__ to return. Defaults\n to the current system's version, as returned by ``platform.android_ver``.\n :param str abi: The `Android ABI <https://developer.android.com/ndk/guides/abis>`__,\n e.g. ``arm64_v8a``. Defaults to the current system's ABI , as returned by\n ``sysconfig.get_platform``. Hyphens and periods will be replaced with\n underscores.\n """\n if platform.system() != "Android" and (api_level is None or abi is None):\n raise TypeError(\n "on non-Android platforms, the api_level and abi arguments are required"\n )\n\n if api_level is None:\n # Python 3.13 was the first version to return platform.system() == "Android",\n # and also the first version to define platform.android_ver().\n api_level = platform.android_ver().api_level # type: ignore[attr-defined]\n\n if abi is None:\n abi = sysconfig.get_platform().split("-")[-1]\n abi = _normalize_string(abi)\n\n # 16 is the minimum API level known to have enough features to support CPython\n # without major patching. 
Yield every API level from the maximum down to the\n # minimum, inclusive.\n min_api_level = 16\n for ver in range(api_level, min_api_level - 1, -1):\n yield f"android_{ver}_{abi}"\n\n\ndef _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:\n linux = _normalize_string(sysconfig.get_platform())\n if not linux.startswith("linux_"):\n # we should never be here, just yield the sysconfig one and return\n yield linux\n return\n if is_32bit:\n if linux == "linux_x86_64":\n linux = "linux_i686"\n elif linux == "linux_aarch64":\n linux = "linux_armv8l"\n _, arch = linux.split("_", 1)\n archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])\n yield from _manylinux.platform_tags(archs)\n yield from _musllinux.platform_tags(archs)\n for arch in archs:\n yield f"linux_{arch}"\n\n\ndef _generic_platforms() -> Iterator[str]:\n yield _normalize_string(sysconfig.get_platform())\n\n\ndef platform_tags() -> Iterator[str]:\n """\n Provides the platform tags for this installation.\n """\n if platform.system() == "Darwin":\n return mac_platforms()\n elif platform.system() == "iOS":\n return ios_platforms()\n elif platform.system() == "Android":\n return android_platforms()\n elif platform.system() == "Linux":\n return _linux_platforms()\n else:\n return _generic_platforms()\n\n\ndef interpreter_name() -> str:\n """\n Returns the name of the running interpreter.\n\n Some implementations have a reserved, two-letter abbreviation which will\n be returned when appropriate.\n """\n name = sys.implementation.name\n return INTERPRETER_SHORT_NAMES.get(name) or name\n\n\ndef interpreter_version(*, warn: bool = False) -> str:\n """\n Returns the version of the running interpreter.\n """\n version = _get_config_var("py_version_nodot", warn=warn)\n if version:\n version = str(version)\n else:\n version = _version_nodot(sys.version_info[:2])\n return version\n\n\ndef _version_nodot(version: PythonVersion) -> str:\n return "".join(map(str, version))\n\n\ndef sys_tags(*, warn: 
bool = False) -> Iterator[Tag]:\n """\n Returns the sequence of tag triples for the running interpreter.\n\n The order of the sequence corresponds to priority order for the\n interpreter, from most to least important.\n """\n\n interp_name = interpreter_name()\n if interp_name == "cp":\n yield from cpython_tags(warn=warn)\n else:\n yield from generic_tags()\n\n if interp_name == "pp":\n interp = "pp3"\n elif interp_name == "cp":\n interp = "cp" + interpreter_version(warn=warn)\n else:\n interp = None\n yield from compatible_tags(interpreter=interp)\n
.venv\Lib\site-packages\packaging\tags.py
tags.py
Python
22,745
0.95
0.217988
0.119266
node-utils
601
2025-03-01T23:02:10.762609
BSD-3-Clause
false
343d067796e4b905805026a1740edf57
# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import annotations\n\nimport functools\nimport re\nfrom typing import NewType, Tuple, Union, cast\n\nfrom .tags import Tag, parse_tag\nfrom .version import InvalidVersion, Version, _TrimmedRelease\n\nBuildTag = Union[Tuple[()], Tuple[int, str]]\nNormalizedName = NewType("NormalizedName", str)\n\n\nclass InvalidName(ValueError):\n """\n An invalid distribution name; users should refer to the packaging user guide.\n """\n\n\nclass InvalidWheelFilename(ValueError):\n """\n An invalid wheel filename was found, users should refer to PEP 427.\n """\n\n\nclass InvalidSdistFilename(ValueError):\n """\n An invalid sdist filename was found, users should refer to the packaging user guide.\n """\n\n\n# Core metadata spec for `Name`\n_validate_regex = re.compile(\n r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE\n)\n_canonicalize_regex = re.compile(r"[-_.]+")\n_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")\n# PEP 427: The build number must start with a digit.\n_build_tag_regex = re.compile(r"(\d+)(.*)")\n\n\ndef canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:\n if validate and not _validate_regex.match(name):\n raise InvalidName(f"name is invalid: {name!r}")\n # This is taken from PEP 503.\n value = _canonicalize_regex.sub("-", name).lower()\n return cast(NormalizedName, value)\n\n\ndef is_normalized_name(name: str) -> bool:\n return _normalized_regex.match(name) is not None\n\n\n@functools.singledispatch\ndef canonicalize_version(\n version: Version | str, *, strip_trailing_zero: bool = True\n) -> str:\n """\n Return a canonical form of a version as a string.\n\n >>> canonicalize_version('1.0.1')\n '1.0.1'\n\n Per PEP 625, versions may have multiple canonical forms, differing\n only by trailing zeros.\n\n 
>>> canonicalize_version('1.0.0')\n '1'\n >>> canonicalize_version('1.0.0', strip_trailing_zero=False)\n '1.0.0'\n\n Invalid versions are returned unaltered.\n\n >>> canonicalize_version('foo bar baz')\n 'foo bar baz'\n """\n return str(_TrimmedRelease(str(version)) if strip_trailing_zero else version)\n\n\n@canonicalize_version.register\ndef _(version: str, *, strip_trailing_zero: bool = True) -> str:\n try:\n parsed = Version(version)\n except InvalidVersion:\n # Legacy versions cannot be normalized\n return version\n return canonicalize_version(parsed, strip_trailing_zero=strip_trailing_zero)\n\n\ndef parse_wheel_filename(\n filename: str,\n) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]:\n if not filename.endswith(".whl"):\n raise InvalidWheelFilename(\n f"Invalid wheel filename (extension must be '.whl'): {filename!r}"\n )\n\n filename = filename[:-4]\n dashes = filename.count("-")\n if dashes not in (4, 5):\n raise InvalidWheelFilename(\n f"Invalid wheel filename (wrong number of parts): {filename!r}"\n )\n\n parts = filename.split("-", dashes - 2)\n name_part = parts[0]\n # See PEP 427 for the rules on escaping the project name.\n if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:\n raise InvalidWheelFilename(f"Invalid project name: {filename!r}")\n name = canonicalize_name(name_part)\n\n try:\n version = Version(parts[1])\n except InvalidVersion as e:\n raise InvalidWheelFilename(\n f"Invalid wheel filename (invalid version): {filename!r}"\n ) from e\n\n if dashes == 5:\n build_part = parts[2]\n build_match = _build_tag_regex.match(build_part)\n if build_match is None:\n raise InvalidWheelFilename(\n f"Invalid build number: {build_part} in {filename!r}"\n )\n build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))\n else:\n build = ()\n tags = parse_tag(parts[-1])\n return (name, version, build, tags)\n\n\ndef parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]:\n if 
filename.endswith(".tar.gz"):\n file_stem = filename[: -len(".tar.gz")]\n elif filename.endswith(".zip"):\n file_stem = filename[: -len(".zip")]\n else:\n raise InvalidSdistFilename(\n f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"\n f" {filename!r}"\n )\n\n # We are requiring a PEP 440 version, which cannot contain dashes,\n # so we split on the last dash.\n name_part, sep, version_part = file_stem.rpartition("-")\n if not sep:\n raise InvalidSdistFilename(f"Invalid sdist filename: {filename!r}")\n\n name = canonicalize_name(name_part)\n\n try:\n version = Version(version_part)\n except InvalidVersion as e:\n raise InvalidSdistFilename(\n f"Invalid sdist filename (invalid version): {filename!r}"\n ) from e\n\n return (name, version)\n
.venv\Lib\site-packages\packaging\utils.py
utils.py
Python
5,050
0.95
0.147239
0.079365
python-kit
142
2024-08-10T19:23:33.505245
BSD-3-Clause
false
f6d73a168977560761887d65c7e9ed18
# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n"""\n.. testsetup::\n\n from packaging.version import parse, Version\n"""\n\nfrom __future__ import annotations\n\nimport itertools\nimport re\nfrom typing import Any, Callable, NamedTuple, SupportsInt, Tuple, Union\n\nfrom ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType\n\n__all__ = ["VERSION_PATTERN", "InvalidVersion", "Version", "parse"]\n\nLocalType = Tuple[Union[int, str], ...]\n\nCmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]\nCmpLocalType = Union[\n NegativeInfinityType,\n Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],\n]\nCmpKey = Tuple[\n int,\n Tuple[int, ...],\n CmpPrePostDevType,\n CmpPrePostDevType,\n CmpPrePostDevType,\n CmpLocalType,\n]\nVersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]\n\n\nclass _Version(NamedTuple):\n epoch: int\n release: tuple[int, ...]\n dev: tuple[str, int] | None\n pre: tuple[str, int] | None\n post: tuple[str, int] | None\n local: LocalType | None\n\n\ndef parse(version: str) -> Version:\n """Parse the given version string.\n\n >>> parse('1.0.dev1')\n <Version('1.0.dev1')>\n\n :param version: The version string to parse.\n :raises InvalidVersion: When the version string is not a valid version.\n """\n return Version(version)\n\n\nclass InvalidVersion(ValueError):\n """Raised when a version string is not a valid version.\n\n >>> Version("invalid")\n Traceback (most recent call last):\n ...\n packaging.version.InvalidVersion: Invalid version: 'invalid'\n """\n\n\nclass _BaseVersion:\n _key: tuple[Any, ...]\n\n def __hash__(self) -> int:\n return hash(self._key)\n\n # Please keep the duplicated `isinstance` check\n # in the six comparisons hereunder\n # unless you find a way to avoid adding overhead function calls.\n def __lt__(self, 
other: _BaseVersion) -> bool:\n if not isinstance(other, _BaseVersion):\n return NotImplemented\n\n return self._key < other._key\n\n def __le__(self, other: _BaseVersion) -> bool:\n if not isinstance(other, _BaseVersion):\n return NotImplemented\n\n return self._key <= other._key\n\n def __eq__(self, other: object) -> bool:\n if not isinstance(other, _BaseVersion):\n return NotImplemented\n\n return self._key == other._key\n\n def __ge__(self, other: _BaseVersion) -> bool:\n if not isinstance(other, _BaseVersion):\n return NotImplemented\n\n return self._key >= other._key\n\n def __gt__(self, other: _BaseVersion) -> bool:\n if not isinstance(other, _BaseVersion):\n return NotImplemented\n\n return self._key > other._key\n\n def __ne__(self, other: object) -> bool:\n if not isinstance(other, _BaseVersion):\n return NotImplemented\n\n return self._key != other._key\n\n\n# Deliberately not anchored to the start and end of the string, to make it\n# easier for 3rd party code to reuse\n_VERSION_PATTERN = r"""\n v?\n (?:\n (?:(?P<epoch>[0-9]+)!)? # epoch\n (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment\n (?P<pre> # pre-release\n [-_\.]?\n (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)\n [-_\.]?\n (?P<pre_n>[0-9]+)?\n )?\n (?P<post> # post release\n (?:-(?P<post_n1>[0-9]+))\n |\n (?:\n [-_\.]?\n (?P<post_l>post|rev|r)\n [-_\.]?\n (?P<post_n2>[0-9]+)?\n )\n )?\n (?P<dev> # dev release\n [-_\.]?\n (?P<dev_l>dev)\n [-_\.]?\n (?P<dev_n>[0-9]+)?\n )?\n )\n (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version\n"""\n\nVERSION_PATTERN = _VERSION_PATTERN\n"""\nA string containing the regular expression used to match a valid version.\n\nThe pattern is not anchored at either end, and is intended for embedding in larger\nexpressions (for example, matching a version number as part of a file name). 
The\nregular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``\nflags set.\n\n:meta hide-value:\n"""\n\n\nclass Version(_BaseVersion):\n """This class abstracts handling of a project's versions.\n\n A :class:`Version` instance is comparison aware and can be compared and\n sorted using the standard Python interfaces.\n\n >>> v1 = Version("1.0a5")\n >>> v2 = Version("1.0")\n >>> v1\n <Version('1.0a5')>\n >>> v2\n <Version('1.0')>\n >>> v1 < v2\n True\n >>> v1 == v2\n False\n >>> v1 > v2\n False\n >>> v1 >= v2\n False\n >>> v1 <= v2\n True\n """\n\n _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)\n _key: CmpKey\n\n def __init__(self, version: str) -> None:\n """Initialize a Version object.\n\n :param version:\n The string representation of a version which will be parsed and normalized\n before use.\n :raises InvalidVersion:\n If the ``version`` does not conform to PEP 440 in any way then this\n exception will be raised.\n """\n\n # Validate the version and parse it into pieces\n match = self._regex.search(version)\n if not match:\n raise InvalidVersion(f"Invalid version: {version!r}")\n\n # Store the parsed out pieces of the version\n self._version = _Version(\n epoch=int(match.group("epoch")) if match.group("epoch") else 0,\n release=tuple(int(i) for i in match.group("release").split(".")),\n pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),\n post=_parse_letter_version(\n match.group("post_l"), match.group("post_n1") or match.group("post_n2")\n ),\n dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),\n local=_parse_local_version(match.group("local")),\n )\n\n # Generate a key which will be used for sorting\n self._key = _cmpkey(\n self._version.epoch,\n self._version.release,\n self._version.pre,\n self._version.post,\n self._version.dev,\n self._version.local,\n )\n\n def __repr__(self) -> str:\n """A representation of the Version that shows all internal 
state.\n\n >>> Version('1.0.0')\n <Version('1.0.0')>\n """\n return f"<Version('{self}')>"\n\n def __str__(self) -> str:\n """A string representation of the version that can be round-tripped.\n\n >>> str(Version("1.0a5"))\n '1.0a5'\n """\n parts = []\n\n # Epoch\n if self.epoch != 0:\n parts.append(f"{self.epoch}!")\n\n # Release segment\n parts.append(".".join(str(x) for x in self.release))\n\n # Pre-release\n if self.pre is not None:\n parts.append("".join(str(x) for x in self.pre))\n\n # Post-release\n if self.post is not None:\n parts.append(f".post{self.post}")\n\n # Development release\n if self.dev is not None:\n parts.append(f".dev{self.dev}")\n\n # Local version segment\n if self.local is not None:\n parts.append(f"+{self.local}")\n\n return "".join(parts)\n\n @property\n def epoch(self) -> int:\n """The epoch of the version.\n\n >>> Version("2.0.0").epoch\n 0\n >>> Version("1!2.0.0").epoch\n 1\n """\n return self._version.epoch\n\n @property\n def release(self) -> tuple[int, ...]:\n """The components of the "release" segment of the version.\n\n >>> Version("1.2.3").release\n (1, 2, 3)\n >>> Version("2.0.0").release\n (2, 0, 0)\n >>> Version("1!2.0.0.post0").release\n (2, 0, 0)\n\n Includes trailing zeroes but not the epoch or any pre-release / development /\n post-release suffixes.\n """\n return self._version.release\n\n @property\n def pre(self) -> tuple[str, int] | None:\n """The pre-release segment of the version.\n\n >>> print(Version("1.2.3").pre)\n None\n >>> Version("1.2.3a1").pre\n ('a', 1)\n >>> Version("1.2.3b1").pre\n ('b', 1)\n >>> Version("1.2.3rc1").pre\n ('rc', 1)\n """\n return self._version.pre\n\n @property\n def post(self) -> int | None:\n """The post-release number of the version.\n\n >>> print(Version("1.2.3").post)\n None\n >>> Version("1.2.3.post1").post\n 1\n """\n return self._version.post[1] if self._version.post else None\n\n @property\n def dev(self) -> int | None:\n """The development number of the version.\n\n >>> 
print(Version("1.2.3").dev)\n None\n >>> Version("1.2.3.dev1").dev\n 1\n """\n return self._version.dev[1] if self._version.dev else None\n\n @property\n def local(self) -> str | None:\n """The local version segment of the version.\n\n >>> print(Version("1.2.3").local)\n None\n >>> Version("1.2.3+abc").local\n 'abc'\n """\n if self._version.local:\n return ".".join(str(x) for x in self._version.local)\n else:\n return None\n\n @property\n def public(self) -> str:\n """The public portion of the version.\n\n >>> Version("1.2.3").public\n '1.2.3'\n >>> Version("1.2.3+abc").public\n '1.2.3'\n >>> Version("1!1.2.3dev1+abc").public\n '1!1.2.3.dev1'\n """\n return str(self).split("+", 1)[0]\n\n @property\n def base_version(self) -> str:\n """The "base version" of the version.\n\n >>> Version("1.2.3").base_version\n '1.2.3'\n >>> Version("1.2.3+abc").base_version\n '1.2.3'\n >>> Version("1!1.2.3dev1+abc").base_version\n '1!1.2.3'\n\n The "base version" is the public version of the project without any pre or post\n release markers.\n """\n parts = []\n\n # Epoch\n if self.epoch != 0:\n parts.append(f"{self.epoch}!")\n\n # Release segment\n parts.append(".".join(str(x) for x in self.release))\n\n return "".join(parts)\n\n @property\n def is_prerelease(self) -> bool:\n """Whether this version is a pre-release.\n\n >>> Version("1.2.3").is_prerelease\n False\n >>> Version("1.2.3a1").is_prerelease\n True\n >>> Version("1.2.3b1").is_prerelease\n True\n >>> Version("1.2.3rc1").is_prerelease\n True\n >>> Version("1.2.3dev1").is_prerelease\n True\n """\n return self.dev is not None or self.pre is not None\n\n @property\n def is_postrelease(self) -> bool:\n """Whether this version is a post-release.\n\n >>> Version("1.2.3").is_postrelease\n False\n >>> Version("1.2.3.post1").is_postrelease\n True\n """\n return self.post is not None\n\n @property\n def is_devrelease(self) -> bool:\n """Whether this version is a development release.\n\n >>> Version("1.2.3").is_devrelease\n False\n >>> 
Version("1.2.3.dev1").is_devrelease\n True\n """\n return self.dev is not None\n\n @property\n def major(self) -> int:\n """The first item of :attr:`release` or ``0`` if unavailable.\n\n >>> Version("1.2.3").major\n 1\n """\n return self.release[0] if len(self.release) >= 1 else 0\n\n @property\n def minor(self) -> int:\n """The second item of :attr:`release` or ``0`` if unavailable.\n\n >>> Version("1.2.3").minor\n 2\n >>> Version("1").minor\n 0\n """\n return self.release[1] if len(self.release) >= 2 else 0\n\n @property\n def micro(self) -> int:\n """The third item of :attr:`release` or ``0`` if unavailable.\n\n >>> Version("1.2.3").micro\n 3\n >>> Version("1").micro\n 0\n """\n return self.release[2] if len(self.release) >= 3 else 0\n\n\nclass _TrimmedRelease(Version):\n @property\n def release(self) -> tuple[int, ...]:\n """\n Release segment without any trailing zeros.\n\n >>> _TrimmedRelease('1.0.0').release\n (1,)\n >>> _TrimmedRelease('0.0').release\n (0,)\n """\n rel = super().release\n nonzeros = (index for index, val in enumerate(rel) if val)\n last_nonzero = max(nonzeros, default=0)\n return rel[: last_nonzero + 1]\n\n\ndef _parse_letter_version(\n letter: str | None, number: str | bytes | SupportsInt | None\n) -> tuple[str, int] | None:\n if letter:\n # We consider there to be an implicit 0 in a pre-release if there is\n # not a numeral associated with it.\n if number is None:\n number = 0\n\n # We normalize any letters to their lower case form\n letter = letter.lower()\n\n # We consider some words to be alternate spellings of other words and\n # in those cases we want to normalize the spellings to our preferred\n # spelling.\n if letter == "alpha":\n letter = "a"\n elif letter == "beta":\n letter = "b"\n elif letter in ["c", "pre", "preview"]:\n letter = "rc"\n elif letter in ["rev", "r"]:\n letter = "post"\n\n return letter, int(number)\n\n assert not letter\n if number:\n # We assume if we are given a number, but we are not given a letter\n # then 
this is using the implicit post release syntax (e.g. 1.0-1)\n letter = "post"\n\n return letter, int(number)\n\n return None\n\n\n_local_version_separators = re.compile(r"[\._-]")\n\n\ndef _parse_local_version(local: str | None) -> LocalType | None:\n """\n Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").\n """\n if local is not None:\n return tuple(\n part.lower() if not part.isdigit() else int(part)\n for part in _local_version_separators.split(local)\n )\n return None\n\n\ndef _cmpkey(\n epoch: int,\n release: tuple[int, ...],\n pre: tuple[str, int] | None,\n post: tuple[str, int] | None,\n dev: tuple[str, int] | None,\n local: LocalType | None,\n) -> CmpKey:\n # When we compare a release version, we want to compare it with all of the\n # trailing zeros removed. So we'll use a reverse the list, drop all the now\n # leading zeros until we come to something non zero, then take the rest\n # re-reverse it back into the correct order and make it a tuple and use\n # that for our sorting key.\n _release = tuple(\n reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))\n )\n\n # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.\n # We'll do this by abusing the pre segment, but we _only_ want to do this\n # if there is not a pre or a post segment. 
If we have one of those then\n # the normal sorting rules will handle this case correctly.\n if pre is None and post is None and dev is not None:\n _pre: CmpPrePostDevType = NegativeInfinity\n # Versions without a pre-release (except as noted above) should sort after\n # those with one.\n elif pre is None:\n _pre = Infinity\n else:\n _pre = pre\n\n # Versions without a post segment should sort before those with one.\n if post is None:\n _post: CmpPrePostDevType = NegativeInfinity\n\n else:\n _post = post\n\n # Versions without a development segment should sort after those with one.\n if dev is None:\n _dev: CmpPrePostDevType = Infinity\n\n else:\n _dev = dev\n\n if local is None:\n # Versions without a local segment should sort before those with one.\n _local: CmpLocalType = NegativeInfinity\n else:\n # Versions with a local segment need that segment parsed to implement\n # the sorting rules in PEP440.\n # - Alpha numeric segments sort before numeric segments\n # - Alpha numeric segments sort lexicographically\n # - Numeric segments sort numerically\n # - Shorter versions sort before longer versions when the prefixes\n # match exactly\n _local = tuple(\n (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local\n )\n\n return epoch, _release, _pre, _post, _dev, _local\n
.venv\Lib\site-packages\packaging\version.py
version.py
Python
16,676
0.95
0.152921
0.102564
vue-tools
812
2023-07-15T08:24:19.101312
GPL-3.0
false
fa56706c3c4b493a029a0069d9601e0e
"""\nELF file parser.\n\nThis provides a class ``ELFFile`` that parses an ELF executable in a similar\ninterface to ``ZipFile``. Only the read interface is implemented.\n\nBased on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca\nELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html\n"""\n\nfrom __future__ import annotations\n\nimport enum\nimport os\nimport struct\nfrom typing import IO\n\n\nclass ELFInvalid(ValueError):\n pass\n\n\nclass EIClass(enum.IntEnum):\n C32 = 1\n C64 = 2\n\n\nclass EIData(enum.IntEnum):\n Lsb = 1\n Msb = 2\n\n\nclass EMachine(enum.IntEnum):\n I386 = 3\n S390 = 22\n Arm = 40\n X8664 = 62\n AArc64 = 183\n\n\nclass ELFFile:\n """\n Representation of an ELF executable.\n """\n\n def __init__(self, f: IO[bytes]) -> None:\n self._f = f\n\n try:\n ident = self._read("16B")\n except struct.error as e:\n raise ELFInvalid("unable to parse identification") from e\n magic = bytes(ident[:4])\n if magic != b"\x7fELF":\n raise ELFInvalid(f"invalid magic: {magic!r}")\n\n self.capacity = ident[4] # Format for program header (bitness).\n self.encoding = ident[5] # Data structure encoding (endianness).\n\n try:\n # e_fmt: Format for program header.\n # p_fmt: Format for section header.\n # p_idx: Indexes to find p_type, p_offset, and p_filesz.\n e_fmt, self._p_fmt, self._p_idx = {\n (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)), # 32-bit LSB.\n (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)), # 32-bit MSB.\n (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)), # 64-bit LSB.\n (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)), # 64-bit MSB.\n }[(self.capacity, self.encoding)]\n except KeyError as e:\n raise ELFInvalid(\n f"unrecognized capacity ({self.capacity}) or encoding ({self.encoding})"\n ) from e\n\n try:\n (\n _,\n self.machine, # Architecture type.\n _,\n _,\n self._e_phoff, # Offset of program header.\n _,\n self.flags, # Processor-specific flags.\n _,\n self._e_phentsize, # Size of section.\n self._e_phnum, # 
Number of sections.\n ) = self._read(e_fmt)\n except struct.error as e:\n raise ELFInvalid("unable to parse machine and section information") from e\n\n def _read(self, fmt: str) -> tuple[int, ...]:\n return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))\n\n @property\n def interpreter(self) -> str | None:\n """\n The path recorded in the ``PT_INTERP`` section header.\n """\n for index in range(self._e_phnum):\n self._f.seek(self._e_phoff + self._e_phentsize * index)\n try:\n data = self._read(self._p_fmt)\n except struct.error:\n continue\n if data[self._p_idx[0]] != 3: # Not PT_INTERP.\n continue\n self._f.seek(data[self._p_idx[1]])\n return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")\n return None\n
.venv\Lib\site-packages\packaging\_elffile.py
_elffile.py
Python
3,286
0.95
0.174312
0.034091
node-utils
343
2024-03-24T23:07:05.431809
MIT
false
e83ac3c80a6482b83578c3ef6cfed4b9
from __future__ import annotations\n\nimport collections\nimport contextlib\nimport functools\nimport os\nimport re\nimport sys\nimport warnings\nfrom typing import Generator, Iterator, NamedTuple, Sequence\n\nfrom ._elffile import EIClass, EIData, ELFFile, EMachine\n\nEF_ARM_ABIMASK = 0xFF000000\nEF_ARM_ABI_VER5 = 0x05000000\nEF_ARM_ABI_FLOAT_HARD = 0x00000400\n\n\n# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`\n# as the type for `path` until then.\n@contextlib.contextmanager\ndef _parse_elf(path: str) -> Generator[ELFFile | None, None, None]:\n try:\n with open(path, "rb") as f:\n yield ELFFile(f)\n except (OSError, TypeError, ValueError):\n yield None\n\n\ndef _is_linux_armhf(executable: str) -> bool:\n # hard-float ABI can be detected from the ELF header of the running\n # process\n # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf\n with _parse_elf(executable) as f:\n return (\n f is not None\n and f.capacity == EIClass.C32\n and f.encoding == EIData.Lsb\n and f.machine == EMachine.Arm\n and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5\n and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD\n )\n\n\ndef _is_linux_i686(executable: str) -> bool:\n with _parse_elf(executable) as f:\n return (\n f is not None\n and f.capacity == EIClass.C32\n and f.encoding == EIData.Lsb\n and f.machine == EMachine.I386\n )\n\n\ndef _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:\n if "armv7l" in archs:\n return _is_linux_armhf(executable)\n if "i686" in archs:\n return _is_linux_i686(executable)\n allowed_archs = {\n "x86_64",\n "aarch64",\n "ppc64",\n "ppc64le",\n "s390x",\n "loongarch64",\n "riscv64",\n }\n return any(arch in allowed_archs for arch in archs)\n\n\n# If glibc ever changes its major version, we need to know what the last\n# minor version was, so we can build the complete list of all versions.\n# For now, guess what the highest minor version might be, assume it will\n# be 50 for testing. 
Once this actually happens, update the dictionary\n# with the actual value.\n_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50)\n\n\nclass _GLibCVersion(NamedTuple):\n major: int\n minor: int\n\n\ndef _glibc_version_string_confstr() -> str | None:\n """\n Primary implementation of glibc_version_string using os.confstr.\n """\n # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely\n # to be broken or missing. This strategy is used in the standard library\n # platform module.\n # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183\n try:\n # Should be a string like "glibc 2.17".\n version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION")\n assert version_string is not None\n _, version = version_string.rsplit()\n except (AssertionError, AttributeError, OSError, ValueError):\n # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...\n return None\n return version\n\n\ndef _glibc_version_string_ctypes() -> str | None:\n """\n Fallback implementation of glibc_version_string using ctypes.\n """\n try:\n import ctypes\n except ImportError:\n return None\n\n # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen\n # manpage says, "If filename is NULL, then the returned handle is for the\n # main program". This way we can let the linker do the work to figure out\n # which libc our process is actually using.\n #\n # We must also handle the special case where the executable is not a\n # dynamically linked executable. This can occur when using musl libc,\n # for example. In this situation, dlopen() will error, leading to an\n # OSError. Interestingly, at least in the case of musl, there is no\n # errno set on the OSError. The single string argument used to construct\n # OSError comes from libc itself and is therefore not portable to\n # hard code here. 
In any case, failure to call dlopen() means we\n # can proceed, so we bail on our attempt.\n try:\n process_namespace = ctypes.CDLL(None)\n except OSError:\n return None\n\n try:\n gnu_get_libc_version = process_namespace.gnu_get_libc_version\n except AttributeError:\n # Symbol doesn't exist -> therefore, we are not linked to\n # glibc.\n return None\n\n # Call gnu_get_libc_version, which returns a string like "2.5"\n gnu_get_libc_version.restype = ctypes.c_char_p\n version_str: str = gnu_get_libc_version()\n # py2 / py3 compatibility:\n if not isinstance(version_str, str):\n version_str = version_str.decode("ascii")\n\n return version_str\n\n\ndef _glibc_version_string() -> str | None:\n """Returns glibc version string, or None if not using glibc."""\n return _glibc_version_string_confstr() or _glibc_version_string_ctypes()\n\n\ndef _parse_glibc_version(version_str: str) -> tuple[int, int]:\n """Parse glibc version.\n\n We use a regexp instead of str.split because we want to discard any\n random junk that might come after the minor version -- this might happen\n in patched/forked versions of glibc (e.g. Linaro's version of glibc\n uses version strings like "2.20-2014.11"). 
See gh-3588.\n """\n m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)\n if not m:\n warnings.warn(\n f"Expected glibc version with 2 components major.minor, got: {version_str}",\n RuntimeWarning,\n stacklevel=2,\n )\n return -1, -1\n return int(m.group("major")), int(m.group("minor"))\n\n\n@functools.lru_cache\ndef _get_glibc_version() -> tuple[int, int]:\n version_str = _glibc_version_string()\n if version_str is None:\n return (-1, -1)\n return _parse_glibc_version(version_str)\n\n\n# From PEP 513, PEP 600\ndef _is_compatible(arch: str, version: _GLibCVersion) -> bool:\n sys_glibc = _get_glibc_version()\n if sys_glibc < version:\n return False\n # Check for presence of _manylinux module.\n try:\n import _manylinux\n except ImportError:\n return True\n if hasattr(_manylinux, "manylinux_compatible"):\n result = _manylinux.manylinux_compatible(version[0], version[1], arch)\n if result is not None:\n return bool(result)\n return True\n if version == _GLibCVersion(2, 5):\n if hasattr(_manylinux, "manylinux1_compatible"):\n return bool(_manylinux.manylinux1_compatible)\n if version == _GLibCVersion(2, 12):\n if hasattr(_manylinux, "manylinux2010_compatible"):\n return bool(_manylinux.manylinux2010_compatible)\n if version == _GLibCVersion(2, 17):\n if hasattr(_manylinux, "manylinux2014_compatible"):\n return bool(_manylinux.manylinux2014_compatible)\n return True\n\n\n_LEGACY_MANYLINUX_MAP = {\n # CentOS 7 w/ glibc 2.17 (PEP 599)\n (2, 17): "manylinux2014",\n # CentOS 6 w/ glibc 2.12 (PEP 571)\n (2, 12): "manylinux2010",\n # CentOS 5 w/ glibc 2.5 (PEP 513)\n (2, 5): "manylinux1",\n}\n\n\ndef platform_tags(archs: Sequence[str]) -> Iterator[str]:\n """Generate manylinux tags compatible to the current platform.\n\n :param archs: Sequence of compatible architectures.\n The first one shall be the closest to the actual architecture and be the part of\n platform tag after the ``linux_`` prefix, e.g. 
``x86_64``.\n The ``linux_`` prefix is assumed as a prerequisite for the current platform to\n be manylinux-compatible.\n\n :returns: An iterator of compatible manylinux tags.\n """\n if not _have_compatible_abi(sys.executable, archs):\n return\n # Oldest glibc to be supported regardless of architecture is (2, 17).\n too_old_glibc2 = _GLibCVersion(2, 16)\n if set(archs) & {"x86_64", "i686"}:\n # On x86/i686 also oldest glibc to be supported is (2, 5).\n too_old_glibc2 = _GLibCVersion(2, 4)\n current_glibc = _GLibCVersion(*_get_glibc_version())\n glibc_max_list = [current_glibc]\n # We can assume compatibility across glibc major versions.\n # https://sourceware.org/bugzilla/show_bug.cgi?id=24636\n #\n # Build a list of maximum glibc versions so that we can\n # output the canonical list of all glibc from current_glibc\n # down to too_old_glibc2, including all intermediary versions.\n for glibc_major in range(current_glibc.major - 1, 1, -1):\n glibc_minor = _LAST_GLIBC_MINOR[glibc_major]\n glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))\n for arch in archs:\n for glibc_max in glibc_max_list:\n if glibc_max.major == too_old_glibc2.major:\n min_minor = too_old_glibc2.minor\n else:\n # For other glibc major versions oldest supported is (x, 0).\n min_minor = -1\n for glibc_minor in range(glibc_max.minor, min_minor, -1):\n glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)\n tag = "manylinux_{}_{}".format(*glibc_version)\n if _is_compatible(arch, glibc_version):\n yield f"{tag}_{arch}"\n # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.\n if glibc_version in _LEGACY_MANYLINUX_MAP:\n legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]\n if _is_compatible(arch, glibc_version):\n yield f"{legacy_tag}_{arch}"\n
.venv\Lib\site-packages\packaging\_manylinux.py
_manylinux.py
Python
9,596
0.95
0.19084
0.214286
awesome-app
550
2024-09-13T02:08:04.696909
MIT
false
46426bd482848dbd15d36e0807583368
"""PEP 656 support.\n\nThis module implements logic to detect if the currently running Python is\nlinked against musl, and what musl version is used.\n"""\n\nfrom __future__ import annotations\n\nimport functools\nimport re\nimport subprocess\nimport sys\nfrom typing import Iterator, NamedTuple, Sequence\n\nfrom ._elffile import ELFFile\n\n\nclass _MuslVersion(NamedTuple):\n major: int\n minor: int\n\n\ndef _parse_musl_version(output: str) -> _MuslVersion | None:\n lines = [n for n in (n.strip() for n in output.splitlines()) if n]\n if len(lines) < 2 or lines[0][:4] != "musl":\n return None\n m = re.match(r"Version (\d+)\.(\d+)", lines[1])\n if not m:\n return None\n return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))\n\n\n@functools.lru_cache\ndef _get_musl_version(executable: str) -> _MuslVersion | None:\n """Detect currently-running musl runtime version.\n\n This is done by checking the specified executable's dynamic linking\n information, and invoking the loader to parse its output for a version\n string. If the loader is musl, the output would be something like::\n\n musl libc (x86_64)\n Version 1.2.2\n Dynamic Program Loader\n """\n try:\n with open(executable, "rb") as f:\n ld = ELFFile(f).interpreter\n except (OSError, TypeError, ValueError):\n return None\n if ld is None or "musl" not in ld:\n return None\n proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)\n return _parse_musl_version(proc.stderr)\n\n\ndef platform_tags(archs: Sequence[str]) -> Iterator[str]:\n """Generate musllinux tags compatible to the current platform.\n\n :param archs: Sequence of compatible architectures.\n The first one shall be the closest to the actual architecture and be the part of\n platform tag after the ``linux_`` prefix, e.g. 
``x86_64``.\n The ``linux_`` prefix is assumed as a prerequisite for the current platform to\n be musllinux-compatible.\n\n :returns: An iterator of compatible musllinux tags.\n """\n sys_musl = _get_musl_version(sys.executable)\n if sys_musl is None: # Python not dynamically linked against musl.\n return\n for arch in archs:\n for minor in range(sys_musl.minor, -1, -1):\n yield f"musllinux_{sys_musl.major}_{minor}_{arch}"\n\n\nif __name__ == "__main__": # pragma: no cover\n import sysconfig\n\n plat = sysconfig.get_platform()\n assert plat.startswith("linux-"), "not linux"\n\n print("plat:", plat)\n print("musl:", _get_musl_version(sys.executable))\n print("tags:", end=" ")\n for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):\n print(t, end="\n ")\n
.venv\Lib\site-packages\packaging\_musllinux.py
_musllinux.py
Python
2,694
0.95
0.223529
0
python-kit
114
2024-07-31T08:33:11.193661
GPL-3.0
false
d0d487bb6b89df7d122f768d8f1f2f2d
"""Handwritten parser of dependency specifiers.\n\nThe docstring for each __parse_* function contains EBNF-inspired grammar representing\nthe implementation.\n"""\n\nfrom __future__ import annotations\n\nimport ast\nfrom typing import NamedTuple, Sequence, Tuple, Union\n\nfrom ._tokenizer import DEFAULT_RULES, Tokenizer\n\n\nclass Node:\n def __init__(self, value: str) -> None:\n self.value = value\n\n def __str__(self) -> str:\n return self.value\n\n def __repr__(self) -> str:\n return f"<{self.__class__.__name__}('{self}')>"\n\n def serialize(self) -> str:\n raise NotImplementedError\n\n\nclass Variable(Node):\n def serialize(self) -> str:\n return str(self)\n\n\nclass Value(Node):\n def serialize(self) -> str:\n return f'"{self}"'\n\n\nclass Op(Node):\n def serialize(self) -> str:\n return str(self)\n\n\nMarkerVar = Union[Variable, Value]\nMarkerItem = Tuple[MarkerVar, Op, MarkerVar]\nMarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]]\nMarkerList = Sequence[Union["MarkerList", MarkerAtom, str]]\n\n\nclass ParsedRequirement(NamedTuple):\n name: str\n url: str\n extras: list[str]\n specifier: str\n marker: MarkerList | None\n\n\n# --------------------------------------------------------------------------------------\n# Recursive descent parser for dependency specifier\n# --------------------------------------------------------------------------------------\ndef parse_requirement(source: str) -> ParsedRequirement:\n return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))\n\n\ndef _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:\n """\n requirement = WS? IDENTIFIER WS? extras WS? 
requirement_details\n """\n tokenizer.consume("WS")\n\n name_token = tokenizer.expect(\n "IDENTIFIER", expected="package name at the start of dependency specifier"\n )\n name = name_token.text\n tokenizer.consume("WS")\n\n extras = _parse_extras(tokenizer)\n tokenizer.consume("WS")\n\n url, specifier, marker = _parse_requirement_details(tokenizer)\n tokenizer.expect("END", expected="end of dependency specifier")\n\n return ParsedRequirement(name, url, extras, specifier, marker)\n\n\ndef _parse_requirement_details(\n tokenizer: Tokenizer,\n) -> tuple[str, str, MarkerList | None]:\n """\n requirement_details = AT URL (WS requirement_marker?)?\n | specifier WS? (requirement_marker)?\n """\n\n specifier = ""\n url = ""\n marker = None\n\n if tokenizer.check("AT"):\n tokenizer.read()\n tokenizer.consume("WS")\n\n url_start = tokenizer.position\n url = tokenizer.expect("URL", expected="URL after @").text\n if tokenizer.check("END", peek=True):\n return (url, specifier, marker)\n\n tokenizer.expect("WS", expected="whitespace after URL")\n\n # The input might end after whitespace.\n if tokenizer.check("END", peek=True):\n return (url, specifier, marker)\n\n marker = _parse_requirement_marker(\n tokenizer, span_start=url_start, after="URL and whitespace"\n )\n else:\n specifier_start = tokenizer.position\n specifier = _parse_specifier(tokenizer)\n tokenizer.consume("WS")\n\n if tokenizer.check("END", peek=True):\n return (url, specifier, marker)\n\n marker = _parse_requirement_marker(\n tokenizer,\n span_start=specifier_start,\n after=(\n "version specifier"\n if specifier\n else "name and no valid version specifier"\n ),\n )\n\n return (url, specifier, marker)\n\n\ndef _parse_requirement_marker(\n tokenizer: Tokenizer, *, span_start: int, after: str\n) -> MarkerList:\n """\n requirement_marker = SEMICOLON marker WS?\n """\n\n if not tokenizer.check("SEMICOLON"):\n tokenizer.raise_syntax_error(\n f"Expected end or semicolon (after {after})",\n span_start=span_start,\n )\n 
tokenizer.read()\n\n marker = _parse_marker(tokenizer)\n tokenizer.consume("WS")\n\n return marker\n\n\ndef _parse_extras(tokenizer: Tokenizer) -> list[str]:\n """\n extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?\n """\n if not tokenizer.check("LEFT_BRACKET", peek=True):\n return []\n\n with tokenizer.enclosing_tokens(\n "LEFT_BRACKET",\n "RIGHT_BRACKET",\n around="extras",\n ):\n tokenizer.consume("WS")\n extras = _parse_extras_list(tokenizer)\n tokenizer.consume("WS")\n\n return extras\n\n\ndef _parse_extras_list(tokenizer: Tokenizer) -> list[str]:\n """\n extras_list = identifier (wsp* ',' wsp* identifier)*\n """\n extras: list[str] = []\n\n if not tokenizer.check("IDENTIFIER"):\n return extras\n\n extras.append(tokenizer.read().text)\n\n while True:\n tokenizer.consume("WS")\n if tokenizer.check("IDENTIFIER", peek=True):\n tokenizer.raise_syntax_error("Expected comma between extra names")\n elif not tokenizer.check("COMMA"):\n break\n\n tokenizer.read()\n tokenizer.consume("WS")\n\n extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")\n extras.append(extra_token.text)\n\n return extras\n\n\ndef _parse_specifier(tokenizer: Tokenizer) -> str:\n """\n specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS\n | WS? version_many WS?\n """\n with tokenizer.enclosing_tokens(\n "LEFT_PARENTHESIS",\n "RIGHT_PARENTHESIS",\n around="version specifier",\n ):\n tokenizer.consume("WS")\n parsed_specifiers = _parse_version_many(tokenizer)\n tokenizer.consume("WS")\n\n return parsed_specifiers\n\n\ndef _parse_version_many(tokenizer: Tokenizer) -> str:\n """\n version_many = (SPECIFIER (WS? COMMA WS? 
SPECIFIER)*)?\n """\n parsed_specifiers = ""\n while tokenizer.check("SPECIFIER"):\n span_start = tokenizer.position\n parsed_specifiers += tokenizer.read().text\n if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):\n tokenizer.raise_syntax_error(\n ".* suffix can only be used with `==` or `!=` operators",\n span_start=span_start,\n span_end=tokenizer.position + 1,\n )\n if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):\n tokenizer.raise_syntax_error(\n "Local version label can only be used with `==` or `!=` operators",\n span_start=span_start,\n span_end=tokenizer.position,\n )\n tokenizer.consume("WS")\n if not tokenizer.check("COMMA"):\n break\n parsed_specifiers += tokenizer.read().text\n tokenizer.consume("WS")\n\n return parsed_specifiers\n\n\n# --------------------------------------------------------------------------------------\n# Recursive descent parser for marker expression\n# --------------------------------------------------------------------------------------\ndef parse_marker(source: str) -> MarkerList:\n return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))\n\n\ndef _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:\n retval = _parse_marker(tokenizer)\n tokenizer.expect("END", expected="end of marker expression")\n return retval\n\n\ndef _parse_marker(tokenizer: Tokenizer) -> MarkerList:\n """\n marker = marker_atom (BOOLOP marker_atom)+\n """\n expression = [_parse_marker_atom(tokenizer)]\n while tokenizer.check("BOOLOP"):\n token = tokenizer.read()\n expr_right = _parse_marker_atom(tokenizer)\n expression.extend((token.text, expr_right))\n return expression\n\n\ndef _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:\n """\n marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?\n | WS? 
marker_item WS?\n """\n\n tokenizer.consume("WS")\n if tokenizer.check("LEFT_PARENTHESIS", peek=True):\n with tokenizer.enclosing_tokens(\n "LEFT_PARENTHESIS",\n "RIGHT_PARENTHESIS",\n around="marker expression",\n ):\n tokenizer.consume("WS")\n marker: MarkerAtom = _parse_marker(tokenizer)\n tokenizer.consume("WS")\n else:\n marker = _parse_marker_item(tokenizer)\n tokenizer.consume("WS")\n return marker\n\n\ndef _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:\n """\n marker_item = WS? marker_var WS? marker_op WS? marker_var WS?\n """\n tokenizer.consume("WS")\n marker_var_left = _parse_marker_var(tokenizer)\n tokenizer.consume("WS")\n marker_op = _parse_marker_op(tokenizer)\n tokenizer.consume("WS")\n marker_var_right = _parse_marker_var(tokenizer)\n tokenizer.consume("WS")\n return (marker_var_left, marker_op, marker_var_right)\n\n\ndef _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:\n """\n marker_var = VARIABLE | QUOTED_STRING\n """\n if tokenizer.check("VARIABLE"):\n return process_env_var(tokenizer.read().text.replace(".", "_"))\n elif tokenizer.check("QUOTED_STRING"):\n return process_python_str(tokenizer.read().text)\n else:\n tokenizer.raise_syntax_error(\n message="Expected a marker variable or quoted string"\n )\n\n\ndef process_env_var(env_var: str) -> Variable:\n if env_var in ("platform_python_implementation", "python_implementation"):\n return Variable("platform_python_implementation")\n else:\n return Variable(env_var)\n\n\ndef process_python_str(python_str: str) -> Value:\n value = ast.literal_eval(python_str)\n return Value(str(value))\n\n\ndef _parse_marker_op(tokenizer: Tokenizer) -> Op:\n """\n marker_op = IN | NOT IN | OP\n """\n if tokenizer.check("IN"):\n tokenizer.read()\n return Op("in")\n elif tokenizer.check("NOT"):\n tokenizer.read()\n tokenizer.expect("WS", expected="whitespace after 'not'")\n tokenizer.expect("IN", expected="'in' after 'not'")\n return Op("not in")\n elif tokenizer.check("OP"):\n return 
Op(tokenizer.read().text)\n else:\n return tokenizer.raise_syntax_error(\n "Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in"\n )\n
.venv\Lib\site-packages\packaging\_parser.py
_parser.py
Python
10,221
0.95
0.147309
0.025641
react-lib
771
2024-04-29T14:06:05.379949
GPL-3.0
false
b8877d075d76fdd0aee2efa2001819a7
# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\n\nclass InfinityType:\n def __repr__(self) -> str:\n return "Infinity"\n\n def __hash__(self) -> int:\n return hash(repr(self))\n\n def __lt__(self, other: object) -> bool:\n return False\n\n def __le__(self, other: object) -> bool:\n return False\n\n def __eq__(self, other: object) -> bool:\n return isinstance(other, self.__class__)\n\n def __gt__(self, other: object) -> bool:\n return True\n\n def __ge__(self, other: object) -> bool:\n return True\n\n def __neg__(self: object) -> "NegativeInfinityType":\n return NegativeInfinity\n\n\nInfinity = InfinityType()\n\n\nclass NegativeInfinityType:\n def __repr__(self) -> str:\n return "-Infinity"\n\n def __hash__(self) -> int:\n return hash(repr(self))\n\n def __lt__(self, other: object) -> bool:\n return True\n\n def __le__(self, other: object) -> bool:\n return True\n\n def __eq__(self, other: object) -> bool:\n return isinstance(other, self.__class__)\n\n def __gt__(self, other: object) -> bool:\n return False\n\n def __ge__(self, other: object) -> bool:\n return False\n\n def __neg__(self: object) -> InfinityType:\n return Infinity\n\n\nNegativeInfinity = NegativeInfinityType()\n
.venv\Lib\site-packages\packaging\_structures.py
_structures.py
Python
1,431
0.95
0.311475
0.076923
python-kit
644
2024-12-17T19:10:07.349910
GPL-3.0
false
de664fedc083927d3d084f416190d876
from __future__ import annotations\n\nimport contextlib\nimport re\nfrom dataclasses import dataclass\nfrom typing import Iterator, NoReturn\n\nfrom .specifiers import Specifier\n\n\n@dataclass\nclass Token:\n name: str\n text: str\n position: int\n\n\nclass ParserSyntaxError(Exception):\n """The provided source text could not be parsed correctly."""\n\n def __init__(\n self,\n message: str,\n *,\n source: str,\n span: tuple[int, int],\n ) -> None:\n self.span = span\n self.message = message\n self.source = source\n\n super().__init__()\n\n def __str__(self) -> str:\n marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^"\n return "\n ".join([self.message, self.source, marker])\n\n\nDEFAULT_RULES: dict[str, str | re.Pattern[str]] = {\n "LEFT_PARENTHESIS": r"\(",\n "RIGHT_PARENTHESIS": r"\)",\n "LEFT_BRACKET": r"\[",\n "RIGHT_BRACKET": r"\]",\n "SEMICOLON": r";",\n "COMMA": r",",\n "QUOTED_STRING": re.compile(\n r"""\n (\n ('[^']*')\n |\n ("[^"]*")\n )\n """,\n re.VERBOSE,\n ),\n "OP": r"(===|==|~=|!=|<=|>=|<|>)",\n "BOOLOP": r"\b(or|and)\b",\n "IN": r"\bin\b",\n "NOT": r"\bnot\b",\n "VARIABLE": re.compile(\n r"""\n \b(\n python_version\n |python_full_version\n |os[._]name\n |sys[._]platform\n |platform_(release|system)\n |platform[._](version|machine|python_implementation)\n |python_implementation\n |implementation_(name|version)\n |extras?\n |dependency_groups\n )\b\n """,\n re.VERBOSE,\n ),\n "SPECIFIER": re.compile(\n Specifier._operator_regex_str + Specifier._version_regex_str,\n re.VERBOSE | re.IGNORECASE,\n ),\n "AT": r"\@",\n "URL": r"[^ \t]+",\n "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",\n "VERSION_PREFIX_TRAIL": r"\.\*",\n "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",\n "WS": r"[ \t]+",\n "END": r"$",\n}\n\n\nclass Tokenizer:\n """Context-sensitive token parsing.\n\n Provides methods to examine the input stream to check whether the next token\n matches.\n """\n\n def __init__(\n self,\n source: str,\n *,\n rules: 
dict[str, str | re.Pattern[str]],\n ) -> None:\n self.source = source\n self.rules: dict[str, re.Pattern[str]] = {\n name: re.compile(pattern) for name, pattern in rules.items()\n }\n self.next_token: Token | None = None\n self.position = 0\n\n def consume(self, name: str) -> None:\n """Move beyond provided token name, if at current position."""\n if self.check(name):\n self.read()\n\n def check(self, name: str, *, peek: bool = False) -> bool:\n """Check whether the next token has the provided name.\n\n By default, if the check succeeds, the token *must* be read before\n another check. If `peek` is set to `True`, the token is not loaded and\n would need to be checked again.\n """\n assert self.next_token is None, (\n f"Cannot check for {name!r}, already have {self.next_token!r}"\n )\n assert name in self.rules, f"Unknown token name: {name!r}"\n\n expression = self.rules[name]\n\n match = expression.match(self.source, self.position)\n if match is None:\n return False\n if not peek:\n self.next_token = Token(name, match[0], self.position)\n return True\n\n def expect(self, name: str, *, expected: str) -> Token:\n """Expect a certain token name next, failing with a syntax error otherwise.\n\n The token is *not* read.\n """\n if not self.check(name):\n raise self.raise_syntax_error(f"Expected {expected}")\n return self.read()\n\n def read(self) -> Token:\n """Consume the next token and return it."""\n token = self.next_token\n assert token is not None\n\n self.position += len(token.text)\n self.next_token = None\n\n return token\n\n def raise_syntax_error(\n self,\n message: str,\n *,\n span_start: int | None = None,\n span_end: int | None = None,\n ) -> NoReturn:\n """Raise ParserSyntaxError at the given position."""\n span = (\n self.position if span_start is None else span_start,\n self.position if span_end is None else span_end,\n )\n raise ParserSyntaxError(\n message,\n source=self.source,\n span=span,\n )\n\n @contextlib.contextmanager\n def enclosing_tokens(\n 
self, open_token: str, close_token: str, *, around: str\n ) -> Iterator[None]:\n if self.check(open_token):\n open_position = self.position\n self.read()\n else:\n open_position = None\n\n yield\n\n if open_position is None:\n return\n\n if not self.check(close_token):\n self.raise_syntax_error(\n f"Expected matching {close_token} for {open_token}, after {around}",\n span_start=open_position,\n )\n\n self.read()\n
.venv\Lib\site-packages\packaging\_tokenizer.py
_tokenizer.py
Python
5,310
0.85
0.133333
0.018293
vue-tools
952
2024-09-20T17:14:45.331927
MIT
false
58bff3ae79b26a93d63f3a9429d70860
# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\n__title__ = "packaging"\n__summary__ = "Core utilities for Python packages"\n__uri__ = "https://github.com/pypa/packaging"\n\n__version__ = "25.0"\n\n__author__ = "Donald Stufft and individual contributors"\n__email__ = "donald@stufft.io"\n\n__license__ = "BSD-2-Clause or Apache-2.0"\n__copyright__ = f"2014 {__author__}"\n
.venv\Lib\site-packages\packaging\__init__.py
__init__.py
Python
494
0.8
0.133333
0.272727
vue-tools
81
2024-12-25T12:33:48.399164
Apache-2.0
false
bb0d0797c99f155fda872b2ee324d93d
\nfrom __future__ import annotations\n\nfrom typing import TypedDict\n\nclass SPDXLicense(TypedDict):\n id: str\n deprecated: bool\n\nclass SPDXException(TypedDict):\n id: str\n deprecated: bool\n\n\nVERSION = '3.25.0'\n\nLICENSES: dict[str, SPDXLicense] = {\n '0bsd': {'id': '0BSD', 'deprecated': False},\n '3d-slicer-1.0': {'id': '3D-Slicer-1.0', 'deprecated': False},\n 'aal': {'id': 'AAL', 'deprecated': False},\n 'abstyles': {'id': 'Abstyles', 'deprecated': False},\n 'adacore-doc': {'id': 'AdaCore-doc', 'deprecated': False},\n 'adobe-2006': {'id': 'Adobe-2006', 'deprecated': False},\n 'adobe-display-postscript': {'id': 'Adobe-Display-PostScript', 'deprecated': False},\n 'adobe-glyph': {'id': 'Adobe-Glyph', 'deprecated': False},\n 'adobe-utopia': {'id': 'Adobe-Utopia', 'deprecated': False},\n 'adsl': {'id': 'ADSL', 'deprecated': False},\n 'afl-1.1': {'id': 'AFL-1.1', 'deprecated': False},\n 'afl-1.2': {'id': 'AFL-1.2', 'deprecated': False},\n 'afl-2.0': {'id': 'AFL-2.0', 'deprecated': False},\n 'afl-2.1': {'id': 'AFL-2.1', 'deprecated': False},\n 'afl-3.0': {'id': 'AFL-3.0', 'deprecated': False},\n 'afmparse': {'id': 'Afmparse', 'deprecated': False},\n 'agpl-1.0': {'id': 'AGPL-1.0', 'deprecated': True},\n 'agpl-1.0-only': {'id': 'AGPL-1.0-only', 'deprecated': False},\n 'agpl-1.0-or-later': {'id': 'AGPL-1.0-or-later', 'deprecated': False},\n 'agpl-3.0': {'id': 'AGPL-3.0', 'deprecated': True},\n 'agpl-3.0-only': {'id': 'AGPL-3.0-only', 'deprecated': False},\n 'agpl-3.0-or-later': {'id': 'AGPL-3.0-or-later', 'deprecated': False},\n 'aladdin': {'id': 'Aladdin', 'deprecated': False},\n 'amd-newlib': {'id': 'AMD-newlib', 'deprecated': False},\n 'amdplpa': {'id': 'AMDPLPA', 'deprecated': False},\n 'aml': {'id': 'AML', 'deprecated': False},\n 'aml-glslang': {'id': 'AML-glslang', 'deprecated': False},\n 'ampas': {'id': 'AMPAS', 'deprecated': False},\n 'antlr-pd': {'id': 'ANTLR-PD', 'deprecated': False},\n 'antlr-pd-fallback': {'id': 'ANTLR-PD-fallback', 'deprecated': 
False},\n 'any-osi': {'id': 'any-OSI', 'deprecated': False},\n 'apache-1.0': {'id': 'Apache-1.0', 'deprecated': False},\n 'apache-1.1': {'id': 'Apache-1.1', 'deprecated': False},\n 'apache-2.0': {'id': 'Apache-2.0', 'deprecated': False},\n 'apafml': {'id': 'APAFML', 'deprecated': False},\n 'apl-1.0': {'id': 'APL-1.0', 'deprecated': False},\n 'app-s2p': {'id': 'App-s2p', 'deprecated': False},\n 'apsl-1.0': {'id': 'APSL-1.0', 'deprecated': False},\n 'apsl-1.1': {'id': 'APSL-1.1', 'deprecated': False},\n 'apsl-1.2': {'id': 'APSL-1.2', 'deprecated': False},\n 'apsl-2.0': {'id': 'APSL-2.0', 'deprecated': False},\n 'arphic-1999': {'id': 'Arphic-1999', 'deprecated': False},\n 'artistic-1.0': {'id': 'Artistic-1.0', 'deprecated': False},\n 'artistic-1.0-cl8': {'id': 'Artistic-1.0-cl8', 'deprecated': False},\n 'artistic-1.0-perl': {'id': 'Artistic-1.0-Perl', 'deprecated': False},\n 'artistic-2.0': {'id': 'Artistic-2.0', 'deprecated': False},\n 'aswf-digital-assets-1.0': {'id': 'ASWF-Digital-Assets-1.0', 'deprecated': False},\n 'aswf-digital-assets-1.1': {'id': 'ASWF-Digital-Assets-1.1', 'deprecated': False},\n 'baekmuk': {'id': 'Baekmuk', 'deprecated': False},\n 'bahyph': {'id': 'Bahyph', 'deprecated': False},\n 'barr': {'id': 'Barr', 'deprecated': False},\n 'bcrypt-solar-designer': {'id': 'bcrypt-Solar-Designer', 'deprecated': False},\n 'beerware': {'id': 'Beerware', 'deprecated': False},\n 'bitstream-charter': {'id': 'Bitstream-Charter', 'deprecated': False},\n 'bitstream-vera': {'id': 'Bitstream-Vera', 'deprecated': False},\n 'bittorrent-1.0': {'id': 'BitTorrent-1.0', 'deprecated': False},\n 'bittorrent-1.1': {'id': 'BitTorrent-1.1', 'deprecated': False},\n 'blessing': {'id': 'blessing', 'deprecated': False},\n 'blueoak-1.0.0': {'id': 'BlueOak-1.0.0', 'deprecated': False},\n 'boehm-gc': {'id': 'Boehm-GC', 'deprecated': False},\n 'borceux': {'id': 'Borceux', 'deprecated': False},\n 'brian-gladman-2-clause': {'id': 'Brian-Gladman-2-Clause', 'deprecated': False},\n 
'brian-gladman-3-clause': {'id': 'Brian-Gladman-3-Clause', 'deprecated': False},\n 'bsd-1-clause': {'id': 'BSD-1-Clause', 'deprecated': False},\n 'bsd-2-clause': {'id': 'BSD-2-Clause', 'deprecated': False},\n 'bsd-2-clause-darwin': {'id': 'BSD-2-Clause-Darwin', 'deprecated': False},\n 'bsd-2-clause-first-lines': {'id': 'BSD-2-Clause-first-lines', 'deprecated': False},\n 'bsd-2-clause-freebsd': {'id': 'BSD-2-Clause-FreeBSD', 'deprecated': True},\n 'bsd-2-clause-netbsd': {'id': 'BSD-2-Clause-NetBSD', 'deprecated': True},\n 'bsd-2-clause-patent': {'id': 'BSD-2-Clause-Patent', 'deprecated': False},\n 'bsd-2-clause-views': {'id': 'BSD-2-Clause-Views', 'deprecated': False},\n 'bsd-3-clause': {'id': 'BSD-3-Clause', 'deprecated': False},\n 'bsd-3-clause-acpica': {'id': 'BSD-3-Clause-acpica', 'deprecated': False},\n 'bsd-3-clause-attribution': {'id': 'BSD-3-Clause-Attribution', 'deprecated': False},\n 'bsd-3-clause-clear': {'id': 'BSD-3-Clause-Clear', 'deprecated': False},\n 'bsd-3-clause-flex': {'id': 'BSD-3-Clause-flex', 'deprecated': False},\n 'bsd-3-clause-hp': {'id': 'BSD-3-Clause-HP', 'deprecated': False},\n 'bsd-3-clause-lbnl': {'id': 'BSD-3-Clause-LBNL', 'deprecated': False},\n 'bsd-3-clause-modification': {'id': 'BSD-3-Clause-Modification', 'deprecated': False},\n 'bsd-3-clause-no-military-license': {'id': 'BSD-3-Clause-No-Military-License', 'deprecated': False},\n 'bsd-3-clause-no-nuclear-license': {'id': 'BSD-3-Clause-No-Nuclear-License', 'deprecated': False},\n 'bsd-3-clause-no-nuclear-license-2014': {'id': 'BSD-3-Clause-No-Nuclear-License-2014', 'deprecated': False},\n 'bsd-3-clause-no-nuclear-warranty': {'id': 'BSD-3-Clause-No-Nuclear-Warranty', 'deprecated': False},\n 'bsd-3-clause-open-mpi': {'id': 'BSD-3-Clause-Open-MPI', 'deprecated': False},\n 'bsd-3-clause-sun': {'id': 'BSD-3-Clause-Sun', 'deprecated': False},\n 'bsd-4-clause': {'id': 'BSD-4-Clause', 'deprecated': False},\n 'bsd-4-clause-shortened': {'id': 'BSD-4-Clause-Shortened', 'deprecated': 
False},\n 'bsd-4-clause-uc': {'id': 'BSD-4-Clause-UC', 'deprecated': False},\n 'bsd-4.3reno': {'id': 'BSD-4.3RENO', 'deprecated': False},\n 'bsd-4.3tahoe': {'id': 'BSD-4.3TAHOE', 'deprecated': False},\n 'bsd-advertising-acknowledgement': {'id': 'BSD-Advertising-Acknowledgement', 'deprecated': False},\n 'bsd-attribution-hpnd-disclaimer': {'id': 'BSD-Attribution-HPND-disclaimer', 'deprecated': False},\n 'bsd-inferno-nettverk': {'id': 'BSD-Inferno-Nettverk', 'deprecated': False},\n 'bsd-protection': {'id': 'BSD-Protection', 'deprecated': False},\n 'bsd-source-beginning-file': {'id': 'BSD-Source-beginning-file', 'deprecated': False},\n 'bsd-source-code': {'id': 'BSD-Source-Code', 'deprecated': False},\n 'bsd-systemics': {'id': 'BSD-Systemics', 'deprecated': False},\n 'bsd-systemics-w3works': {'id': 'BSD-Systemics-W3Works', 'deprecated': False},\n 'bsl-1.0': {'id': 'BSL-1.0', 'deprecated': False},\n 'busl-1.1': {'id': 'BUSL-1.1', 'deprecated': False},\n 'bzip2-1.0.5': {'id': 'bzip2-1.0.5', 'deprecated': True},\n 'bzip2-1.0.6': {'id': 'bzip2-1.0.6', 'deprecated': False},\n 'c-uda-1.0': {'id': 'C-UDA-1.0', 'deprecated': False},\n 'cal-1.0': {'id': 'CAL-1.0', 'deprecated': False},\n 'cal-1.0-combined-work-exception': {'id': 'CAL-1.0-Combined-Work-Exception', 'deprecated': False},\n 'caldera': {'id': 'Caldera', 'deprecated': False},\n 'caldera-no-preamble': {'id': 'Caldera-no-preamble', 'deprecated': False},\n 'catharon': {'id': 'Catharon', 'deprecated': False},\n 'catosl-1.1': {'id': 'CATOSL-1.1', 'deprecated': False},\n 'cc-by-1.0': {'id': 'CC-BY-1.0', 'deprecated': False},\n 'cc-by-2.0': {'id': 'CC-BY-2.0', 'deprecated': False},\n 'cc-by-2.5': {'id': 'CC-BY-2.5', 'deprecated': False},\n 'cc-by-2.5-au': {'id': 'CC-BY-2.5-AU', 'deprecated': False},\n 'cc-by-3.0': {'id': 'CC-BY-3.0', 'deprecated': False},\n 'cc-by-3.0-at': {'id': 'CC-BY-3.0-AT', 'deprecated': False},\n 'cc-by-3.0-au': {'id': 'CC-BY-3.0-AU', 'deprecated': False},\n 'cc-by-3.0-de': {'id': 'CC-BY-3.0-DE', 
'deprecated': False},\n 'cc-by-3.0-igo': {'id': 'CC-BY-3.0-IGO', 'deprecated': False},\n 'cc-by-3.0-nl': {'id': 'CC-BY-3.0-NL', 'deprecated': False},\n 'cc-by-3.0-us': {'id': 'CC-BY-3.0-US', 'deprecated': False},\n 'cc-by-4.0': {'id': 'CC-BY-4.0', 'deprecated': False},\n 'cc-by-nc-1.0': {'id': 'CC-BY-NC-1.0', 'deprecated': False},\n 'cc-by-nc-2.0': {'id': 'CC-BY-NC-2.0', 'deprecated': False},\n 'cc-by-nc-2.5': {'id': 'CC-BY-NC-2.5', 'deprecated': False},\n 'cc-by-nc-3.0': {'id': 'CC-BY-NC-3.0', 'deprecated': False},\n 'cc-by-nc-3.0-de': {'id': 'CC-BY-NC-3.0-DE', 'deprecated': False},\n 'cc-by-nc-4.0': {'id': 'CC-BY-NC-4.0', 'deprecated': False},\n 'cc-by-nc-nd-1.0': {'id': 'CC-BY-NC-ND-1.0', 'deprecated': False},\n 'cc-by-nc-nd-2.0': {'id': 'CC-BY-NC-ND-2.0', 'deprecated': False},\n 'cc-by-nc-nd-2.5': {'id': 'CC-BY-NC-ND-2.5', 'deprecated': False},\n 'cc-by-nc-nd-3.0': {'id': 'CC-BY-NC-ND-3.0', 'deprecated': False},\n 'cc-by-nc-nd-3.0-de': {'id': 'CC-BY-NC-ND-3.0-DE', 'deprecated': False},\n 'cc-by-nc-nd-3.0-igo': {'id': 'CC-BY-NC-ND-3.0-IGO', 'deprecated': False},\n 'cc-by-nc-nd-4.0': {'id': 'CC-BY-NC-ND-4.0', 'deprecated': False},\n 'cc-by-nc-sa-1.0': {'id': 'CC-BY-NC-SA-1.0', 'deprecated': False},\n 'cc-by-nc-sa-2.0': {'id': 'CC-BY-NC-SA-2.0', 'deprecated': False},\n 'cc-by-nc-sa-2.0-de': {'id': 'CC-BY-NC-SA-2.0-DE', 'deprecated': False},\n 'cc-by-nc-sa-2.0-fr': {'id': 'CC-BY-NC-SA-2.0-FR', 'deprecated': False},\n 'cc-by-nc-sa-2.0-uk': {'id': 'CC-BY-NC-SA-2.0-UK', 'deprecated': False},\n 'cc-by-nc-sa-2.5': {'id': 'CC-BY-NC-SA-2.5', 'deprecated': False},\n 'cc-by-nc-sa-3.0': {'id': 'CC-BY-NC-SA-3.0', 'deprecated': False},\n 'cc-by-nc-sa-3.0-de': {'id': 'CC-BY-NC-SA-3.0-DE', 'deprecated': False},\n 'cc-by-nc-sa-3.0-igo': {'id': 'CC-BY-NC-SA-3.0-IGO', 'deprecated': False},\n 'cc-by-nc-sa-4.0': {'id': 'CC-BY-NC-SA-4.0', 'deprecated': False},\n 'cc-by-nd-1.0': {'id': 'CC-BY-ND-1.0', 'deprecated': False},\n 'cc-by-nd-2.0': {'id': 'CC-BY-ND-2.0', 'deprecated': 
False},\n 'cc-by-nd-2.5': {'id': 'CC-BY-ND-2.5', 'deprecated': False},\n 'cc-by-nd-3.0': {'id': 'CC-BY-ND-3.0', 'deprecated': False},\n 'cc-by-nd-3.0-de': {'id': 'CC-BY-ND-3.0-DE', 'deprecated': False},\n 'cc-by-nd-4.0': {'id': 'CC-BY-ND-4.0', 'deprecated': False},\n 'cc-by-sa-1.0': {'id': 'CC-BY-SA-1.0', 'deprecated': False},\n 'cc-by-sa-2.0': {'id': 'CC-BY-SA-2.0', 'deprecated': False},\n 'cc-by-sa-2.0-uk': {'id': 'CC-BY-SA-2.0-UK', 'deprecated': False},\n 'cc-by-sa-2.1-jp': {'id': 'CC-BY-SA-2.1-JP', 'deprecated': False},\n 'cc-by-sa-2.5': {'id': 'CC-BY-SA-2.5', 'deprecated': False},\n 'cc-by-sa-3.0': {'id': 'CC-BY-SA-3.0', 'deprecated': False},\n 'cc-by-sa-3.0-at': {'id': 'CC-BY-SA-3.0-AT', 'deprecated': False},\n 'cc-by-sa-3.0-de': {'id': 'CC-BY-SA-3.0-DE', 'deprecated': False},\n 'cc-by-sa-3.0-igo': {'id': 'CC-BY-SA-3.0-IGO', 'deprecated': False},\n 'cc-by-sa-4.0': {'id': 'CC-BY-SA-4.0', 'deprecated': False},\n 'cc-pddc': {'id': 'CC-PDDC', 'deprecated': False},\n 'cc0-1.0': {'id': 'CC0-1.0', 'deprecated': False},\n 'cddl-1.0': {'id': 'CDDL-1.0', 'deprecated': False},\n 'cddl-1.1': {'id': 'CDDL-1.1', 'deprecated': False},\n 'cdl-1.0': {'id': 'CDL-1.0', 'deprecated': False},\n 'cdla-permissive-1.0': {'id': 'CDLA-Permissive-1.0', 'deprecated': False},\n 'cdla-permissive-2.0': {'id': 'CDLA-Permissive-2.0', 'deprecated': False},\n 'cdla-sharing-1.0': {'id': 'CDLA-Sharing-1.0', 'deprecated': False},\n 'cecill-1.0': {'id': 'CECILL-1.0', 'deprecated': False},\n 'cecill-1.1': {'id': 'CECILL-1.1', 'deprecated': False},\n 'cecill-2.0': {'id': 'CECILL-2.0', 'deprecated': False},\n 'cecill-2.1': {'id': 'CECILL-2.1', 'deprecated': False},\n 'cecill-b': {'id': 'CECILL-B', 'deprecated': False},\n 'cecill-c': {'id': 'CECILL-C', 'deprecated': False},\n 'cern-ohl-1.1': {'id': 'CERN-OHL-1.1', 'deprecated': False},\n 'cern-ohl-1.2': {'id': 'CERN-OHL-1.2', 'deprecated': False},\n 'cern-ohl-p-2.0': {'id': 'CERN-OHL-P-2.0', 'deprecated': False},\n 'cern-ohl-s-2.0': {'id': 
'CERN-OHL-S-2.0', 'deprecated': False},\n 'cern-ohl-w-2.0': {'id': 'CERN-OHL-W-2.0', 'deprecated': False},\n 'cfitsio': {'id': 'CFITSIO', 'deprecated': False},\n 'check-cvs': {'id': 'check-cvs', 'deprecated': False},\n 'checkmk': {'id': 'checkmk', 'deprecated': False},\n 'clartistic': {'id': 'ClArtistic', 'deprecated': False},\n 'clips': {'id': 'Clips', 'deprecated': False},\n 'cmu-mach': {'id': 'CMU-Mach', 'deprecated': False},\n 'cmu-mach-nodoc': {'id': 'CMU-Mach-nodoc', 'deprecated': False},\n 'cnri-jython': {'id': 'CNRI-Jython', 'deprecated': False},\n 'cnri-python': {'id': 'CNRI-Python', 'deprecated': False},\n 'cnri-python-gpl-compatible': {'id': 'CNRI-Python-GPL-Compatible', 'deprecated': False},\n 'coil-1.0': {'id': 'COIL-1.0', 'deprecated': False},\n 'community-spec-1.0': {'id': 'Community-Spec-1.0', 'deprecated': False},\n 'condor-1.1': {'id': 'Condor-1.1', 'deprecated': False},\n 'copyleft-next-0.3.0': {'id': 'copyleft-next-0.3.0', 'deprecated': False},\n 'copyleft-next-0.3.1': {'id': 'copyleft-next-0.3.1', 'deprecated': False},\n 'cornell-lossless-jpeg': {'id': 'Cornell-Lossless-JPEG', 'deprecated': False},\n 'cpal-1.0': {'id': 'CPAL-1.0', 'deprecated': False},\n 'cpl-1.0': {'id': 'CPL-1.0', 'deprecated': False},\n 'cpol-1.02': {'id': 'CPOL-1.02', 'deprecated': False},\n 'cronyx': {'id': 'Cronyx', 'deprecated': False},\n 'crossword': {'id': 'Crossword', 'deprecated': False},\n 'crystalstacker': {'id': 'CrystalStacker', 'deprecated': False},\n 'cua-opl-1.0': {'id': 'CUA-OPL-1.0', 'deprecated': False},\n 'cube': {'id': 'Cube', 'deprecated': False},\n 'curl': {'id': 'curl', 'deprecated': False},\n 'cve-tou': {'id': 'cve-tou', 'deprecated': False},\n 'd-fsl-1.0': {'id': 'D-FSL-1.0', 'deprecated': False},\n 'dec-3-clause': {'id': 'DEC-3-Clause', 'deprecated': False},\n 'diffmark': {'id': 'diffmark', 'deprecated': False},\n 'dl-de-by-2.0': {'id': 'DL-DE-BY-2.0', 'deprecated': False},\n 'dl-de-zero-2.0': {'id': 'DL-DE-ZERO-2.0', 'deprecated': False},\n 'doc': 
{'id': 'DOC', 'deprecated': False},\n 'docbook-schema': {'id': 'DocBook-Schema', 'deprecated': False},\n 'docbook-xml': {'id': 'DocBook-XML', 'deprecated': False},\n 'dotseqn': {'id': 'Dotseqn', 'deprecated': False},\n 'drl-1.0': {'id': 'DRL-1.0', 'deprecated': False},\n 'drl-1.1': {'id': 'DRL-1.1', 'deprecated': False},\n 'dsdp': {'id': 'DSDP', 'deprecated': False},\n 'dtoa': {'id': 'dtoa', 'deprecated': False},\n 'dvipdfm': {'id': 'dvipdfm', 'deprecated': False},\n 'ecl-1.0': {'id': 'ECL-1.0', 'deprecated': False},\n 'ecl-2.0': {'id': 'ECL-2.0', 'deprecated': False},\n 'ecos-2.0': {'id': 'eCos-2.0', 'deprecated': True},\n 'efl-1.0': {'id': 'EFL-1.0', 'deprecated': False},\n 'efl-2.0': {'id': 'EFL-2.0', 'deprecated': False},\n 'egenix': {'id': 'eGenix', 'deprecated': False},\n 'elastic-2.0': {'id': 'Elastic-2.0', 'deprecated': False},\n 'entessa': {'id': 'Entessa', 'deprecated': False},\n 'epics': {'id': 'EPICS', 'deprecated': False},\n 'epl-1.0': {'id': 'EPL-1.0', 'deprecated': False},\n 'epl-2.0': {'id': 'EPL-2.0', 'deprecated': False},\n 'erlpl-1.1': {'id': 'ErlPL-1.1', 'deprecated': False},\n 'etalab-2.0': {'id': 'etalab-2.0', 'deprecated': False},\n 'eudatagrid': {'id': 'EUDatagrid', 'deprecated': False},\n 'eupl-1.0': {'id': 'EUPL-1.0', 'deprecated': False},\n 'eupl-1.1': {'id': 'EUPL-1.1', 'deprecated': False},\n 'eupl-1.2': {'id': 'EUPL-1.2', 'deprecated': False},\n 'eurosym': {'id': 'Eurosym', 'deprecated': False},\n 'fair': {'id': 'Fair', 'deprecated': False},\n 'fbm': {'id': 'FBM', 'deprecated': False},\n 'fdk-aac': {'id': 'FDK-AAC', 'deprecated': False},\n 'ferguson-twofish': {'id': 'Ferguson-Twofish', 'deprecated': False},\n 'frameworx-1.0': {'id': 'Frameworx-1.0', 'deprecated': False},\n 'freebsd-doc': {'id': 'FreeBSD-DOC', 'deprecated': False},\n 'freeimage': {'id': 'FreeImage', 'deprecated': False},\n 'fsfap': {'id': 'FSFAP', 'deprecated': False},\n 'fsfap-no-warranty-disclaimer': {'id': 'FSFAP-no-warranty-disclaimer', 'deprecated': False},\n 
'fsful': {'id': 'FSFUL', 'deprecated': False},\n 'fsfullr': {'id': 'FSFULLR', 'deprecated': False},\n 'fsfullrwd': {'id': 'FSFULLRWD', 'deprecated': False},\n 'ftl': {'id': 'FTL', 'deprecated': False},\n 'furuseth': {'id': 'Furuseth', 'deprecated': False},\n 'fwlw': {'id': 'fwlw', 'deprecated': False},\n 'gcr-docs': {'id': 'GCR-docs', 'deprecated': False},\n 'gd': {'id': 'GD', 'deprecated': False},\n 'gfdl-1.1': {'id': 'GFDL-1.1', 'deprecated': True},\n 'gfdl-1.1-invariants-only': {'id': 'GFDL-1.1-invariants-only', 'deprecated': False},\n 'gfdl-1.1-invariants-or-later': {'id': 'GFDL-1.1-invariants-or-later', 'deprecated': False},\n 'gfdl-1.1-no-invariants-only': {'id': 'GFDL-1.1-no-invariants-only', 'deprecated': False},\n 'gfdl-1.1-no-invariants-or-later': {'id': 'GFDL-1.1-no-invariants-or-later', 'deprecated': False},\n 'gfdl-1.1-only': {'id': 'GFDL-1.1-only', 'deprecated': False},\n 'gfdl-1.1-or-later': {'id': 'GFDL-1.1-or-later', 'deprecated': False},\n 'gfdl-1.2': {'id': 'GFDL-1.2', 'deprecated': True},\n 'gfdl-1.2-invariants-only': {'id': 'GFDL-1.2-invariants-only', 'deprecated': False},\n 'gfdl-1.2-invariants-or-later': {'id': 'GFDL-1.2-invariants-or-later', 'deprecated': False},\n 'gfdl-1.2-no-invariants-only': {'id': 'GFDL-1.2-no-invariants-only', 'deprecated': False},\n 'gfdl-1.2-no-invariants-or-later': {'id': 'GFDL-1.2-no-invariants-or-later', 'deprecated': False},\n 'gfdl-1.2-only': {'id': 'GFDL-1.2-only', 'deprecated': False},\n 'gfdl-1.2-or-later': {'id': 'GFDL-1.2-or-later', 'deprecated': False},\n 'gfdl-1.3': {'id': 'GFDL-1.3', 'deprecated': True},\n 'gfdl-1.3-invariants-only': {'id': 'GFDL-1.3-invariants-only', 'deprecated': False},\n 'gfdl-1.3-invariants-or-later': {'id': 'GFDL-1.3-invariants-or-later', 'deprecated': False},\n 'gfdl-1.3-no-invariants-only': {'id': 'GFDL-1.3-no-invariants-only', 'deprecated': False},\n 'gfdl-1.3-no-invariants-or-later': {'id': 'GFDL-1.3-no-invariants-or-later', 'deprecated': False},\n 'gfdl-1.3-only': {'id': 
'GFDL-1.3-only', 'deprecated': False},\n 'gfdl-1.3-or-later': {'id': 'GFDL-1.3-or-later', 'deprecated': False},\n 'giftware': {'id': 'Giftware', 'deprecated': False},\n 'gl2ps': {'id': 'GL2PS', 'deprecated': False},\n 'glide': {'id': 'Glide', 'deprecated': False},\n 'glulxe': {'id': 'Glulxe', 'deprecated': False},\n 'glwtpl': {'id': 'GLWTPL', 'deprecated': False},\n 'gnuplot': {'id': 'gnuplot', 'deprecated': False},\n 'gpl-1.0': {'id': 'GPL-1.0', 'deprecated': True},\n 'gpl-1.0+': {'id': 'GPL-1.0+', 'deprecated': True},\n 'gpl-1.0-only': {'id': 'GPL-1.0-only', 'deprecated': False},\n 'gpl-1.0-or-later': {'id': 'GPL-1.0-or-later', 'deprecated': False},\n 'gpl-2.0': {'id': 'GPL-2.0', 'deprecated': True},\n 'gpl-2.0+': {'id': 'GPL-2.0+', 'deprecated': True},\n 'gpl-2.0-only': {'id': 'GPL-2.0-only', 'deprecated': False},\n 'gpl-2.0-or-later': {'id': 'GPL-2.0-or-later', 'deprecated': False},\n 'gpl-2.0-with-autoconf-exception': {'id': 'GPL-2.0-with-autoconf-exception', 'deprecated': True},\n 'gpl-2.0-with-bison-exception': {'id': 'GPL-2.0-with-bison-exception', 'deprecated': True},\n 'gpl-2.0-with-classpath-exception': {'id': 'GPL-2.0-with-classpath-exception', 'deprecated': True},\n 'gpl-2.0-with-font-exception': {'id': 'GPL-2.0-with-font-exception', 'deprecated': True},\n 'gpl-2.0-with-gcc-exception': {'id': 'GPL-2.0-with-GCC-exception', 'deprecated': True},\n 'gpl-3.0': {'id': 'GPL-3.0', 'deprecated': True},\n 'gpl-3.0+': {'id': 'GPL-3.0+', 'deprecated': True},\n 'gpl-3.0-only': {'id': 'GPL-3.0-only', 'deprecated': False},\n 'gpl-3.0-or-later': {'id': 'GPL-3.0-or-later', 'deprecated': False},\n 'gpl-3.0-with-autoconf-exception': {'id': 'GPL-3.0-with-autoconf-exception', 'deprecated': True},\n 'gpl-3.0-with-gcc-exception': {'id': 'GPL-3.0-with-GCC-exception', 'deprecated': True},\n 'graphics-gems': {'id': 'Graphics-Gems', 'deprecated': False},\n 'gsoap-1.3b': {'id': 'gSOAP-1.3b', 'deprecated': False},\n 'gtkbook': {'id': 'gtkbook', 'deprecated': False},\n 'gutmann': 
{'id': 'Gutmann', 'deprecated': False},\n 'haskellreport': {'id': 'HaskellReport', 'deprecated': False},\n 'hdparm': {'id': 'hdparm', 'deprecated': False},\n 'hidapi': {'id': 'HIDAPI', 'deprecated': False},\n 'hippocratic-2.1': {'id': 'Hippocratic-2.1', 'deprecated': False},\n 'hp-1986': {'id': 'HP-1986', 'deprecated': False},\n 'hp-1989': {'id': 'HP-1989', 'deprecated': False},\n 'hpnd': {'id': 'HPND', 'deprecated': False},\n 'hpnd-dec': {'id': 'HPND-DEC', 'deprecated': False},\n 'hpnd-doc': {'id': 'HPND-doc', 'deprecated': False},\n 'hpnd-doc-sell': {'id': 'HPND-doc-sell', 'deprecated': False},\n 'hpnd-export-us': {'id': 'HPND-export-US', 'deprecated': False},\n 'hpnd-export-us-acknowledgement': {'id': 'HPND-export-US-acknowledgement', 'deprecated': False},\n 'hpnd-export-us-modify': {'id': 'HPND-export-US-modify', 'deprecated': False},\n 'hpnd-export2-us': {'id': 'HPND-export2-US', 'deprecated': False},\n 'hpnd-fenneberg-livingston': {'id': 'HPND-Fenneberg-Livingston', 'deprecated': False},\n 'hpnd-inria-imag': {'id': 'HPND-INRIA-IMAG', 'deprecated': False},\n 'hpnd-intel': {'id': 'HPND-Intel', 'deprecated': False},\n 'hpnd-kevlin-henney': {'id': 'HPND-Kevlin-Henney', 'deprecated': False},\n 'hpnd-markus-kuhn': {'id': 'HPND-Markus-Kuhn', 'deprecated': False},\n 'hpnd-merchantability-variant': {'id': 'HPND-merchantability-variant', 'deprecated': False},\n 'hpnd-mit-disclaimer': {'id': 'HPND-MIT-disclaimer', 'deprecated': False},\n 'hpnd-netrek': {'id': 'HPND-Netrek', 'deprecated': False},\n 'hpnd-pbmplus': {'id': 'HPND-Pbmplus', 'deprecated': False},\n 'hpnd-sell-mit-disclaimer-xserver': {'id': 'HPND-sell-MIT-disclaimer-xserver', 'deprecated': False},\n 'hpnd-sell-regexpr': {'id': 'HPND-sell-regexpr', 'deprecated': False},\n 'hpnd-sell-variant': {'id': 'HPND-sell-variant', 'deprecated': False},\n 'hpnd-sell-variant-mit-disclaimer': {'id': 'HPND-sell-variant-MIT-disclaimer', 'deprecated': False},\n 'hpnd-sell-variant-mit-disclaimer-rev': {'id': 
'HPND-sell-variant-MIT-disclaimer-rev', 'deprecated': False},\n 'hpnd-uc': {'id': 'HPND-UC', 'deprecated': False},\n 'hpnd-uc-export-us': {'id': 'HPND-UC-export-US', 'deprecated': False},\n 'htmltidy': {'id': 'HTMLTIDY', 'deprecated': False},\n 'ibm-pibs': {'id': 'IBM-pibs', 'deprecated': False},\n 'icu': {'id': 'ICU', 'deprecated': False},\n 'iec-code-components-eula': {'id': 'IEC-Code-Components-EULA', 'deprecated': False},\n 'ijg': {'id': 'IJG', 'deprecated': False},\n 'ijg-short': {'id': 'IJG-short', 'deprecated': False},\n 'imagemagick': {'id': 'ImageMagick', 'deprecated': False},\n 'imatix': {'id': 'iMatix', 'deprecated': False},\n 'imlib2': {'id': 'Imlib2', 'deprecated': False},\n 'info-zip': {'id': 'Info-ZIP', 'deprecated': False},\n 'inner-net-2.0': {'id': 'Inner-Net-2.0', 'deprecated': False},\n 'intel': {'id': 'Intel', 'deprecated': False},\n 'intel-acpi': {'id': 'Intel-ACPI', 'deprecated': False},\n 'interbase-1.0': {'id': 'Interbase-1.0', 'deprecated': False},\n 'ipa': {'id': 'IPA', 'deprecated': False},\n 'ipl-1.0': {'id': 'IPL-1.0', 'deprecated': False},\n 'isc': {'id': 'ISC', 'deprecated': False},\n 'isc-veillard': {'id': 'ISC-Veillard', 'deprecated': False},\n 'jam': {'id': 'Jam', 'deprecated': False},\n 'jasper-2.0': {'id': 'JasPer-2.0', 'deprecated': False},\n 'jpl-image': {'id': 'JPL-image', 'deprecated': False},\n 'jpnic': {'id': 'JPNIC', 'deprecated': False},\n 'json': {'id': 'JSON', 'deprecated': False},\n 'kastrup': {'id': 'Kastrup', 'deprecated': False},\n 'kazlib': {'id': 'Kazlib', 'deprecated': False},\n 'knuth-ctan': {'id': 'Knuth-CTAN', 'deprecated': False},\n 'lal-1.2': {'id': 'LAL-1.2', 'deprecated': False},\n 'lal-1.3': {'id': 'LAL-1.3', 'deprecated': False},\n 'latex2e': {'id': 'Latex2e', 'deprecated': False},\n 'latex2e-translated-notice': {'id': 'Latex2e-translated-notice', 'deprecated': False},\n 'leptonica': {'id': 'Leptonica', 'deprecated': False},\n 'lgpl-2.0': {'id': 'LGPL-2.0', 'deprecated': True},\n 'lgpl-2.0+': {'id': 
'LGPL-2.0+', 'deprecated': True},\n 'lgpl-2.0-only': {'id': 'LGPL-2.0-only', 'deprecated': False},\n 'lgpl-2.0-or-later': {'id': 'LGPL-2.0-or-later', 'deprecated': False},\n 'lgpl-2.1': {'id': 'LGPL-2.1', 'deprecated': True},\n 'lgpl-2.1+': {'id': 'LGPL-2.1+', 'deprecated': True},\n 'lgpl-2.1-only': {'id': 'LGPL-2.1-only', 'deprecated': False},\n 'lgpl-2.1-or-later': {'id': 'LGPL-2.1-or-later', 'deprecated': False},\n 'lgpl-3.0': {'id': 'LGPL-3.0', 'deprecated': True},\n 'lgpl-3.0+': {'id': 'LGPL-3.0+', 'deprecated': True},\n 'lgpl-3.0-only': {'id': 'LGPL-3.0-only', 'deprecated': False},\n 'lgpl-3.0-or-later': {'id': 'LGPL-3.0-or-later', 'deprecated': False},\n 'lgpllr': {'id': 'LGPLLR', 'deprecated': False},\n 'libpng': {'id': 'Libpng', 'deprecated': False},\n 'libpng-2.0': {'id': 'libpng-2.0', 'deprecated': False},\n 'libselinux-1.0': {'id': 'libselinux-1.0', 'deprecated': False},\n 'libtiff': {'id': 'libtiff', 'deprecated': False},\n 'libutil-david-nugent': {'id': 'libutil-David-Nugent', 'deprecated': False},\n 'liliq-p-1.1': {'id': 'LiLiQ-P-1.1', 'deprecated': False},\n 'liliq-r-1.1': {'id': 'LiLiQ-R-1.1', 'deprecated': False},\n 'liliq-rplus-1.1': {'id': 'LiLiQ-Rplus-1.1', 'deprecated': False},\n 'linux-man-pages-1-para': {'id': 'Linux-man-pages-1-para', 'deprecated': False},\n 'linux-man-pages-copyleft': {'id': 'Linux-man-pages-copyleft', 'deprecated': False},\n 'linux-man-pages-copyleft-2-para': {'id': 'Linux-man-pages-copyleft-2-para', 'deprecated': False},\n 'linux-man-pages-copyleft-var': {'id': 'Linux-man-pages-copyleft-var', 'deprecated': False},\n 'linux-openib': {'id': 'Linux-OpenIB', 'deprecated': False},\n 'loop': {'id': 'LOOP', 'deprecated': False},\n 'lpd-document': {'id': 'LPD-document', 'deprecated': False},\n 'lpl-1.0': {'id': 'LPL-1.0', 'deprecated': False},\n 'lpl-1.02': {'id': 'LPL-1.02', 'deprecated': False},\n 'lppl-1.0': {'id': 'LPPL-1.0', 'deprecated': False},\n 'lppl-1.1': {'id': 'LPPL-1.1', 'deprecated': False},\n 'lppl-1.2': {'id': 
'LPPL-1.2', 'deprecated': False},\n 'lppl-1.3a': {'id': 'LPPL-1.3a', 'deprecated': False},\n 'lppl-1.3c': {'id': 'LPPL-1.3c', 'deprecated': False},\n 'lsof': {'id': 'lsof', 'deprecated': False},\n 'lucida-bitmap-fonts': {'id': 'Lucida-Bitmap-Fonts', 'deprecated': False},\n 'lzma-sdk-9.11-to-9.20': {'id': 'LZMA-SDK-9.11-to-9.20', 'deprecated': False},\n 'lzma-sdk-9.22': {'id': 'LZMA-SDK-9.22', 'deprecated': False},\n 'mackerras-3-clause': {'id': 'Mackerras-3-Clause', 'deprecated': False},\n 'mackerras-3-clause-acknowledgment': {'id': 'Mackerras-3-Clause-acknowledgment', 'deprecated': False},\n 'magaz': {'id': 'magaz', 'deprecated': False},\n 'mailprio': {'id': 'mailprio', 'deprecated': False},\n 'makeindex': {'id': 'MakeIndex', 'deprecated': False},\n 'martin-birgmeier': {'id': 'Martin-Birgmeier', 'deprecated': False},\n 'mcphee-slideshow': {'id': 'McPhee-slideshow', 'deprecated': False},\n 'metamail': {'id': 'metamail', 'deprecated': False},\n 'minpack': {'id': 'Minpack', 'deprecated': False},\n 'miros': {'id': 'MirOS', 'deprecated': False},\n 'mit': {'id': 'MIT', 'deprecated': False},\n 'mit-0': {'id': 'MIT-0', 'deprecated': False},\n 'mit-advertising': {'id': 'MIT-advertising', 'deprecated': False},\n 'mit-cmu': {'id': 'MIT-CMU', 'deprecated': False},\n 'mit-enna': {'id': 'MIT-enna', 'deprecated': False},\n 'mit-feh': {'id': 'MIT-feh', 'deprecated': False},\n 'mit-festival': {'id': 'MIT-Festival', 'deprecated': False},\n 'mit-khronos-old': {'id': 'MIT-Khronos-old', 'deprecated': False},\n 'mit-modern-variant': {'id': 'MIT-Modern-Variant', 'deprecated': False},\n 'mit-open-group': {'id': 'MIT-open-group', 'deprecated': False},\n 'mit-testregex': {'id': 'MIT-testregex', 'deprecated': False},\n 'mit-wu': {'id': 'MIT-Wu', 'deprecated': False},\n 'mitnfa': {'id': 'MITNFA', 'deprecated': False},\n 'mmixware': {'id': 'MMIXware', 'deprecated': False},\n 'motosoto': {'id': 'Motosoto', 'deprecated': False},\n 'mpeg-ssg': {'id': 'MPEG-SSG', 'deprecated': False},\n 
'mpi-permissive': {'id': 'mpi-permissive', 'deprecated': False},\n 'mpich2': {'id': 'mpich2', 'deprecated': False},\n 'mpl-1.0': {'id': 'MPL-1.0', 'deprecated': False},\n 'mpl-1.1': {'id': 'MPL-1.1', 'deprecated': False},\n 'mpl-2.0': {'id': 'MPL-2.0', 'deprecated': False},\n 'mpl-2.0-no-copyleft-exception': {'id': 'MPL-2.0-no-copyleft-exception', 'deprecated': False},\n 'mplus': {'id': 'mplus', 'deprecated': False},\n 'ms-lpl': {'id': 'MS-LPL', 'deprecated': False},\n 'ms-pl': {'id': 'MS-PL', 'deprecated': False},\n 'ms-rl': {'id': 'MS-RL', 'deprecated': False},\n 'mtll': {'id': 'MTLL', 'deprecated': False},\n 'mulanpsl-1.0': {'id': 'MulanPSL-1.0', 'deprecated': False},\n 'mulanpsl-2.0': {'id': 'MulanPSL-2.0', 'deprecated': False},\n 'multics': {'id': 'Multics', 'deprecated': False},\n 'mup': {'id': 'Mup', 'deprecated': False},\n 'naist-2003': {'id': 'NAIST-2003', 'deprecated': False},\n 'nasa-1.3': {'id': 'NASA-1.3', 'deprecated': False},\n 'naumen': {'id': 'Naumen', 'deprecated': False},\n 'nbpl-1.0': {'id': 'NBPL-1.0', 'deprecated': False},\n 'ncbi-pd': {'id': 'NCBI-PD', 'deprecated': False},\n 'ncgl-uk-2.0': {'id': 'NCGL-UK-2.0', 'deprecated': False},\n 'ncl': {'id': 'NCL', 'deprecated': False},\n 'ncsa': {'id': 'NCSA', 'deprecated': False},\n 'net-snmp': {'id': 'Net-SNMP', 'deprecated': True},\n 'netcdf': {'id': 'NetCDF', 'deprecated': False},\n 'newsletr': {'id': 'Newsletr', 'deprecated': False},\n 'ngpl': {'id': 'NGPL', 'deprecated': False},\n 'nicta-1.0': {'id': 'NICTA-1.0', 'deprecated': False},\n 'nist-pd': {'id': 'NIST-PD', 'deprecated': False},\n 'nist-pd-fallback': {'id': 'NIST-PD-fallback', 'deprecated': False},\n 'nist-software': {'id': 'NIST-Software', 'deprecated': False},\n 'nlod-1.0': {'id': 'NLOD-1.0', 'deprecated': False},\n 'nlod-2.0': {'id': 'NLOD-2.0', 'deprecated': False},\n 'nlpl': {'id': 'NLPL', 'deprecated': False},\n 'nokia': {'id': 'Nokia', 'deprecated': False},\n 'nosl': {'id': 'NOSL', 'deprecated': False},\n 'noweb': {'id': 'Noweb', 
'deprecated': False},\n 'npl-1.0': {'id': 'NPL-1.0', 'deprecated': False},\n 'npl-1.1': {'id': 'NPL-1.1', 'deprecated': False},\n 'nposl-3.0': {'id': 'NPOSL-3.0', 'deprecated': False},\n 'nrl': {'id': 'NRL', 'deprecated': False},\n 'ntp': {'id': 'NTP', 'deprecated': False},\n 'ntp-0': {'id': 'NTP-0', 'deprecated': False},\n 'nunit': {'id': 'Nunit', 'deprecated': True},\n 'o-uda-1.0': {'id': 'O-UDA-1.0', 'deprecated': False},\n 'oar': {'id': 'OAR', 'deprecated': False},\n 'occt-pl': {'id': 'OCCT-PL', 'deprecated': False},\n 'oclc-2.0': {'id': 'OCLC-2.0', 'deprecated': False},\n 'odbl-1.0': {'id': 'ODbL-1.0', 'deprecated': False},\n 'odc-by-1.0': {'id': 'ODC-By-1.0', 'deprecated': False},\n 'offis': {'id': 'OFFIS', 'deprecated': False},\n 'ofl-1.0': {'id': 'OFL-1.0', 'deprecated': False},\n 'ofl-1.0-no-rfn': {'id': 'OFL-1.0-no-RFN', 'deprecated': False},\n 'ofl-1.0-rfn': {'id': 'OFL-1.0-RFN', 'deprecated': False},\n 'ofl-1.1': {'id': 'OFL-1.1', 'deprecated': False},\n 'ofl-1.1-no-rfn': {'id': 'OFL-1.1-no-RFN', 'deprecated': False},\n 'ofl-1.1-rfn': {'id': 'OFL-1.1-RFN', 'deprecated': False},\n 'ogc-1.0': {'id': 'OGC-1.0', 'deprecated': False},\n 'ogdl-taiwan-1.0': {'id': 'OGDL-Taiwan-1.0', 'deprecated': False},\n 'ogl-canada-2.0': {'id': 'OGL-Canada-2.0', 'deprecated': False},\n 'ogl-uk-1.0': {'id': 'OGL-UK-1.0', 'deprecated': False},\n 'ogl-uk-2.0': {'id': 'OGL-UK-2.0', 'deprecated': False},\n 'ogl-uk-3.0': {'id': 'OGL-UK-3.0', 'deprecated': False},\n 'ogtsl': {'id': 'OGTSL', 'deprecated': False},\n 'oldap-1.1': {'id': 'OLDAP-1.1', 'deprecated': False},\n 'oldap-1.2': {'id': 'OLDAP-1.2', 'deprecated': False},\n 'oldap-1.3': {'id': 'OLDAP-1.3', 'deprecated': False},\n 'oldap-1.4': {'id': 'OLDAP-1.4', 'deprecated': False},\n 'oldap-2.0': {'id': 'OLDAP-2.0', 'deprecated': False},\n 'oldap-2.0.1': {'id': 'OLDAP-2.0.1', 'deprecated': False},\n 'oldap-2.1': {'id': 'OLDAP-2.1', 'deprecated': False},\n 'oldap-2.2': {'id': 'OLDAP-2.2', 'deprecated': False},\n 'oldap-2.2.1': 
{'id': 'OLDAP-2.2.1', 'deprecated': False},\n 'oldap-2.2.2': {'id': 'OLDAP-2.2.2', 'deprecated': False},\n 'oldap-2.3': {'id': 'OLDAP-2.3', 'deprecated': False},\n 'oldap-2.4': {'id': 'OLDAP-2.4', 'deprecated': False},\n 'oldap-2.5': {'id': 'OLDAP-2.5', 'deprecated': False},\n 'oldap-2.6': {'id': 'OLDAP-2.6', 'deprecated': False},\n 'oldap-2.7': {'id': 'OLDAP-2.7', 'deprecated': False},\n 'oldap-2.8': {'id': 'OLDAP-2.8', 'deprecated': False},\n 'olfl-1.3': {'id': 'OLFL-1.3', 'deprecated': False},\n 'oml': {'id': 'OML', 'deprecated': False},\n 'openpbs-2.3': {'id': 'OpenPBS-2.3', 'deprecated': False},\n 'openssl': {'id': 'OpenSSL', 'deprecated': False},\n 'openssl-standalone': {'id': 'OpenSSL-standalone', 'deprecated': False},\n 'openvision': {'id': 'OpenVision', 'deprecated': False},\n 'opl-1.0': {'id': 'OPL-1.0', 'deprecated': False},\n 'opl-uk-3.0': {'id': 'OPL-UK-3.0', 'deprecated': False},\n 'opubl-1.0': {'id': 'OPUBL-1.0', 'deprecated': False},\n 'oset-pl-2.1': {'id': 'OSET-PL-2.1', 'deprecated': False},\n 'osl-1.0': {'id': 'OSL-1.0', 'deprecated': False},\n 'osl-1.1': {'id': 'OSL-1.1', 'deprecated': False},\n 'osl-2.0': {'id': 'OSL-2.0', 'deprecated': False},\n 'osl-2.1': {'id': 'OSL-2.1', 'deprecated': False},\n 'osl-3.0': {'id': 'OSL-3.0', 'deprecated': False},\n 'padl': {'id': 'PADL', 'deprecated': False},\n 'parity-6.0.0': {'id': 'Parity-6.0.0', 'deprecated': False},\n 'parity-7.0.0': {'id': 'Parity-7.0.0', 'deprecated': False},\n 'pddl-1.0': {'id': 'PDDL-1.0', 'deprecated': False},\n 'php-3.0': {'id': 'PHP-3.0', 'deprecated': False},\n 'php-3.01': {'id': 'PHP-3.01', 'deprecated': False},\n 'pixar': {'id': 'Pixar', 'deprecated': False},\n 'pkgconf': {'id': 'pkgconf', 'deprecated': False},\n 'plexus': {'id': 'Plexus', 'deprecated': False},\n 'pnmstitch': {'id': 'pnmstitch', 'deprecated': False},\n 'polyform-noncommercial-1.0.0': {'id': 'PolyForm-Noncommercial-1.0.0', 'deprecated': False},\n 'polyform-small-business-1.0.0': {'id': 
'PolyForm-Small-Business-1.0.0', 'deprecated': False},\n 'postgresql': {'id': 'PostgreSQL', 'deprecated': False},\n 'ppl': {'id': 'PPL', 'deprecated': False},\n 'psf-2.0': {'id': 'PSF-2.0', 'deprecated': False},\n 'psfrag': {'id': 'psfrag', 'deprecated': False},\n 'psutils': {'id': 'psutils', 'deprecated': False},\n 'python-2.0': {'id': 'Python-2.0', 'deprecated': False},\n 'python-2.0.1': {'id': 'Python-2.0.1', 'deprecated': False},\n 'python-ldap': {'id': 'python-ldap', 'deprecated': False},\n 'qhull': {'id': 'Qhull', 'deprecated': False},\n 'qpl-1.0': {'id': 'QPL-1.0', 'deprecated': False},\n 'qpl-1.0-inria-2004': {'id': 'QPL-1.0-INRIA-2004', 'deprecated': False},\n 'radvd': {'id': 'radvd', 'deprecated': False},\n 'rdisc': {'id': 'Rdisc', 'deprecated': False},\n 'rhecos-1.1': {'id': 'RHeCos-1.1', 'deprecated': False},\n 'rpl-1.1': {'id': 'RPL-1.1', 'deprecated': False},\n 'rpl-1.5': {'id': 'RPL-1.5', 'deprecated': False},\n 'rpsl-1.0': {'id': 'RPSL-1.0', 'deprecated': False},\n 'rsa-md': {'id': 'RSA-MD', 'deprecated': False},\n 'rscpl': {'id': 'RSCPL', 'deprecated': False},\n 'ruby': {'id': 'Ruby', 'deprecated': False},\n 'ruby-pty': {'id': 'Ruby-pty', 'deprecated': False},\n 'sax-pd': {'id': 'SAX-PD', 'deprecated': False},\n 'sax-pd-2.0': {'id': 'SAX-PD-2.0', 'deprecated': False},\n 'saxpath': {'id': 'Saxpath', 'deprecated': False},\n 'scea': {'id': 'SCEA', 'deprecated': False},\n 'schemereport': {'id': 'SchemeReport', 'deprecated': False},\n 'sendmail': {'id': 'Sendmail', 'deprecated': False},\n 'sendmail-8.23': {'id': 'Sendmail-8.23', 'deprecated': False},\n 'sgi-b-1.0': {'id': 'SGI-B-1.0', 'deprecated': False},\n 'sgi-b-1.1': {'id': 'SGI-B-1.1', 'deprecated': False},\n 'sgi-b-2.0': {'id': 'SGI-B-2.0', 'deprecated': False},\n 'sgi-opengl': {'id': 'SGI-OpenGL', 'deprecated': False},\n 'sgp4': {'id': 'SGP4', 'deprecated': False},\n 'shl-0.5': {'id': 'SHL-0.5', 'deprecated': False},\n 'shl-0.51': {'id': 'SHL-0.51', 'deprecated': False},\n 'simpl-2.0': {'id': 
'SimPL-2.0', 'deprecated': False},\n 'sissl': {'id': 'SISSL', 'deprecated': False},\n 'sissl-1.2': {'id': 'SISSL-1.2', 'deprecated': False},\n 'sl': {'id': 'SL', 'deprecated': False},\n 'sleepycat': {'id': 'Sleepycat', 'deprecated': False},\n 'smlnj': {'id': 'SMLNJ', 'deprecated': False},\n 'smppl': {'id': 'SMPPL', 'deprecated': False},\n 'snia': {'id': 'SNIA', 'deprecated': False},\n 'snprintf': {'id': 'snprintf', 'deprecated': False},\n 'softsurfer': {'id': 'softSurfer', 'deprecated': False},\n 'soundex': {'id': 'Soundex', 'deprecated': False},\n 'spencer-86': {'id': 'Spencer-86', 'deprecated': False},\n 'spencer-94': {'id': 'Spencer-94', 'deprecated': False},\n 'spencer-99': {'id': 'Spencer-99', 'deprecated': False},\n 'spl-1.0': {'id': 'SPL-1.0', 'deprecated': False},\n 'ssh-keyscan': {'id': 'ssh-keyscan', 'deprecated': False},\n 'ssh-openssh': {'id': 'SSH-OpenSSH', 'deprecated': False},\n 'ssh-short': {'id': 'SSH-short', 'deprecated': False},\n 'ssleay-standalone': {'id': 'SSLeay-standalone', 'deprecated': False},\n 'sspl-1.0': {'id': 'SSPL-1.0', 'deprecated': False},\n 'standardml-nj': {'id': 'StandardML-NJ', 'deprecated': True},\n 'sugarcrm-1.1.3': {'id': 'SugarCRM-1.1.3', 'deprecated': False},\n 'sun-ppp': {'id': 'Sun-PPP', 'deprecated': False},\n 'sun-ppp-2000': {'id': 'Sun-PPP-2000', 'deprecated': False},\n 'sunpro': {'id': 'SunPro', 'deprecated': False},\n 'swl': {'id': 'SWL', 'deprecated': False},\n 'swrule': {'id': 'swrule', 'deprecated': False},\n 'symlinks': {'id': 'Symlinks', 'deprecated': False},\n 'tapr-ohl-1.0': {'id': 'TAPR-OHL-1.0', 'deprecated': False},\n 'tcl': {'id': 'TCL', 'deprecated': False},\n 'tcp-wrappers': {'id': 'TCP-wrappers', 'deprecated': False},\n 'termreadkey': {'id': 'TermReadKey', 'deprecated': False},\n 'tgppl-1.0': {'id': 'TGPPL-1.0', 'deprecated': False},\n 'threeparttable': {'id': 'threeparttable', 'deprecated': False},\n 'tmate': {'id': 'TMate', 'deprecated': False},\n 'torque-1.1': {'id': 'TORQUE-1.1', 'deprecated': 
False},\n 'tosl': {'id': 'TOSL', 'deprecated': False},\n 'tpdl': {'id': 'TPDL', 'deprecated': False},\n 'tpl-1.0': {'id': 'TPL-1.0', 'deprecated': False},\n 'ttwl': {'id': 'TTWL', 'deprecated': False},\n 'ttyp0': {'id': 'TTYP0', 'deprecated': False},\n 'tu-berlin-1.0': {'id': 'TU-Berlin-1.0', 'deprecated': False},\n 'tu-berlin-2.0': {'id': 'TU-Berlin-2.0', 'deprecated': False},\n 'ubuntu-font-1.0': {'id': 'Ubuntu-font-1.0', 'deprecated': False},\n 'ucar': {'id': 'UCAR', 'deprecated': False},\n 'ucl-1.0': {'id': 'UCL-1.0', 'deprecated': False},\n 'ulem': {'id': 'ulem', 'deprecated': False},\n 'umich-merit': {'id': 'UMich-Merit', 'deprecated': False},\n 'unicode-3.0': {'id': 'Unicode-3.0', 'deprecated': False},\n 'unicode-dfs-2015': {'id': 'Unicode-DFS-2015', 'deprecated': False},\n 'unicode-dfs-2016': {'id': 'Unicode-DFS-2016', 'deprecated': False},\n 'unicode-tou': {'id': 'Unicode-TOU', 'deprecated': False},\n 'unixcrypt': {'id': 'UnixCrypt', 'deprecated': False},\n 'unlicense': {'id': 'Unlicense', 'deprecated': False},\n 'upl-1.0': {'id': 'UPL-1.0', 'deprecated': False},\n 'urt-rle': {'id': 'URT-RLE', 'deprecated': False},\n 'vim': {'id': 'Vim', 'deprecated': False},\n 'vostrom': {'id': 'VOSTROM', 'deprecated': False},\n 'vsl-1.0': {'id': 'VSL-1.0', 'deprecated': False},\n 'w3c': {'id': 'W3C', 'deprecated': False},\n 'w3c-19980720': {'id': 'W3C-19980720', 'deprecated': False},\n 'w3c-20150513': {'id': 'W3C-20150513', 'deprecated': False},\n 'w3m': {'id': 'w3m', 'deprecated': False},\n 'watcom-1.0': {'id': 'Watcom-1.0', 'deprecated': False},\n 'widget-workshop': {'id': 'Widget-Workshop', 'deprecated': False},\n 'wsuipa': {'id': 'Wsuipa', 'deprecated': False},\n 'wtfpl': {'id': 'WTFPL', 'deprecated': False},\n 'wxwindows': {'id': 'wxWindows', 'deprecated': True},\n 'x11': {'id': 'X11', 'deprecated': False},\n 'x11-distribute-modifications-variant': {'id': 'X11-distribute-modifications-variant', 'deprecated': False},\n 'x11-swapped': {'id': 'X11-swapped', 
'deprecated': False},\n 'xdebug-1.03': {'id': 'Xdebug-1.03', 'deprecated': False},\n 'xerox': {'id': 'Xerox', 'deprecated': False},\n 'xfig': {'id': 'Xfig', 'deprecated': False},\n 'xfree86-1.1': {'id': 'XFree86-1.1', 'deprecated': False},\n 'xinetd': {'id': 'xinetd', 'deprecated': False},\n 'xkeyboard-config-zinoviev': {'id': 'xkeyboard-config-Zinoviev', 'deprecated': False},\n 'xlock': {'id': 'xlock', 'deprecated': False},\n 'xnet': {'id': 'Xnet', 'deprecated': False},\n 'xpp': {'id': 'xpp', 'deprecated': False},\n 'xskat': {'id': 'XSkat', 'deprecated': False},\n 'xzoom': {'id': 'xzoom', 'deprecated': False},\n 'ypl-1.0': {'id': 'YPL-1.0', 'deprecated': False},\n 'ypl-1.1': {'id': 'YPL-1.1', 'deprecated': False},\n 'zed': {'id': 'Zed', 'deprecated': False},\n 'zeeff': {'id': 'Zeeff', 'deprecated': False},\n 'zend-2.0': {'id': 'Zend-2.0', 'deprecated': False},\n 'zimbra-1.3': {'id': 'Zimbra-1.3', 'deprecated': False},\n 'zimbra-1.4': {'id': 'Zimbra-1.4', 'deprecated': False},\n 'zlib': {'id': 'Zlib', 'deprecated': False},\n 'zlib-acknowledgement': {'id': 'zlib-acknowledgement', 'deprecated': False},\n 'zpl-1.1': {'id': 'ZPL-1.1', 'deprecated': False},\n 'zpl-2.0': {'id': 'ZPL-2.0', 'deprecated': False},\n 'zpl-2.1': {'id': 'ZPL-2.1', 'deprecated': False},\n}\n\nEXCEPTIONS: dict[str, SPDXException] = {\n '389-exception': {'id': '389-exception', 'deprecated': False},\n 'asterisk-exception': {'id': 'Asterisk-exception', 'deprecated': False},\n 'asterisk-linking-protocols-exception': {'id': 'Asterisk-linking-protocols-exception', 'deprecated': False},\n 'autoconf-exception-2.0': {'id': 'Autoconf-exception-2.0', 'deprecated': False},\n 'autoconf-exception-3.0': {'id': 'Autoconf-exception-3.0', 'deprecated': False},\n 'autoconf-exception-generic': {'id': 'Autoconf-exception-generic', 'deprecated': False},\n 'autoconf-exception-generic-3.0': {'id': 'Autoconf-exception-generic-3.0', 'deprecated': False},\n 'autoconf-exception-macro': {'id': 'Autoconf-exception-macro', 
'deprecated': False},\n 'bison-exception-1.24': {'id': 'Bison-exception-1.24', 'deprecated': False},\n 'bison-exception-2.2': {'id': 'Bison-exception-2.2', 'deprecated': False},\n 'bootloader-exception': {'id': 'Bootloader-exception', 'deprecated': False},\n 'classpath-exception-2.0': {'id': 'Classpath-exception-2.0', 'deprecated': False},\n 'clisp-exception-2.0': {'id': 'CLISP-exception-2.0', 'deprecated': False},\n 'cryptsetup-openssl-exception': {'id': 'cryptsetup-OpenSSL-exception', 'deprecated': False},\n 'digirule-foss-exception': {'id': 'DigiRule-FOSS-exception', 'deprecated': False},\n 'ecos-exception-2.0': {'id': 'eCos-exception-2.0', 'deprecated': False},\n 'erlang-otp-linking-exception': {'id': 'erlang-otp-linking-exception', 'deprecated': False},\n 'fawkes-runtime-exception': {'id': 'Fawkes-Runtime-exception', 'deprecated': False},\n 'fltk-exception': {'id': 'FLTK-exception', 'deprecated': False},\n 'fmt-exception': {'id': 'fmt-exception', 'deprecated': False},\n 'font-exception-2.0': {'id': 'Font-exception-2.0', 'deprecated': False},\n 'freertos-exception-2.0': {'id': 'freertos-exception-2.0', 'deprecated': False},\n 'gcc-exception-2.0': {'id': 'GCC-exception-2.0', 'deprecated': False},\n 'gcc-exception-2.0-note': {'id': 'GCC-exception-2.0-note', 'deprecated': False},\n 'gcc-exception-3.1': {'id': 'GCC-exception-3.1', 'deprecated': False},\n 'gmsh-exception': {'id': 'Gmsh-exception', 'deprecated': False},\n 'gnat-exception': {'id': 'GNAT-exception', 'deprecated': False},\n 'gnome-examples-exception': {'id': 'GNOME-examples-exception', 'deprecated': False},\n 'gnu-compiler-exception': {'id': 'GNU-compiler-exception', 'deprecated': False},\n 'gnu-javamail-exception': {'id': 'gnu-javamail-exception', 'deprecated': False},\n 'gpl-3.0-interface-exception': {'id': 'GPL-3.0-interface-exception', 'deprecated': False},\n 'gpl-3.0-linking-exception': {'id': 'GPL-3.0-linking-exception', 'deprecated': False},\n 'gpl-3.0-linking-source-exception': {'id': 
'GPL-3.0-linking-source-exception', 'deprecated': False},\n 'gpl-cc-1.0': {'id': 'GPL-CC-1.0', 'deprecated': False},\n 'gstreamer-exception-2005': {'id': 'GStreamer-exception-2005', 'deprecated': False},\n 'gstreamer-exception-2008': {'id': 'GStreamer-exception-2008', 'deprecated': False},\n 'i2p-gpl-java-exception': {'id': 'i2p-gpl-java-exception', 'deprecated': False},\n 'kicad-libraries-exception': {'id': 'KiCad-libraries-exception', 'deprecated': False},\n 'lgpl-3.0-linking-exception': {'id': 'LGPL-3.0-linking-exception', 'deprecated': False},\n 'libpri-openh323-exception': {'id': 'libpri-OpenH323-exception', 'deprecated': False},\n 'libtool-exception': {'id': 'Libtool-exception', 'deprecated': False},\n 'linux-syscall-note': {'id': 'Linux-syscall-note', 'deprecated': False},\n 'llgpl': {'id': 'LLGPL', 'deprecated': False},\n 'llvm-exception': {'id': 'LLVM-exception', 'deprecated': False},\n 'lzma-exception': {'id': 'LZMA-exception', 'deprecated': False},\n 'mif-exception': {'id': 'mif-exception', 'deprecated': False},\n 'nokia-qt-exception-1.1': {'id': 'Nokia-Qt-exception-1.1', 'deprecated': True},\n 'ocaml-lgpl-linking-exception': {'id': 'OCaml-LGPL-linking-exception', 'deprecated': False},\n 'occt-exception-1.0': {'id': 'OCCT-exception-1.0', 'deprecated': False},\n 'openjdk-assembly-exception-1.0': {'id': 'OpenJDK-assembly-exception-1.0', 'deprecated': False},\n 'openvpn-openssl-exception': {'id': 'openvpn-openssl-exception', 'deprecated': False},\n 'pcre2-exception': {'id': 'PCRE2-exception', 'deprecated': False},\n 'ps-or-pdf-font-exception-20170817': {'id': 'PS-or-PDF-font-exception-20170817', 'deprecated': False},\n 'qpl-1.0-inria-2004-exception': {'id': 'QPL-1.0-INRIA-2004-exception', 'deprecated': False},\n 'qt-gpl-exception-1.0': {'id': 'Qt-GPL-exception-1.0', 'deprecated': False},\n 'qt-lgpl-exception-1.1': {'id': 'Qt-LGPL-exception-1.1', 'deprecated': False},\n 'qwt-exception-1.0': {'id': 'Qwt-exception-1.0', 'deprecated': False},\n 
'romic-exception': {'id': 'romic-exception', 'deprecated': False},\n 'rrdtool-floss-exception-2.0': {'id': 'RRDtool-FLOSS-exception-2.0', 'deprecated': False},\n 'sane-exception': {'id': 'SANE-exception', 'deprecated': False},\n 'shl-2.0': {'id': 'SHL-2.0', 'deprecated': False},\n 'shl-2.1': {'id': 'SHL-2.1', 'deprecated': False},\n 'stunnel-exception': {'id': 'stunnel-exception', 'deprecated': False},\n 'swi-exception': {'id': 'SWI-exception', 'deprecated': False},\n 'swift-exception': {'id': 'Swift-exception', 'deprecated': False},\n 'texinfo-exception': {'id': 'Texinfo-exception', 'deprecated': False},\n 'u-boot-exception-2.0': {'id': 'u-boot-exception-2.0', 'deprecated': False},\n 'ubdl-exception': {'id': 'UBDL-exception', 'deprecated': False},\n 'universal-foss-exception-1.0': {'id': 'Universal-FOSS-exception-1.0', 'deprecated': False},\n 'vsftpd-openssl-exception': {'id': 'vsftpd-openssl-exception', 'deprecated': False},\n 'wxwindows-exception-3.1': {'id': 'WxWindows-exception-3.1', 'deprecated': False},\n 'x11vnc-openssl-exception': {'id': 'x11vnc-openssl-exception', 'deprecated': False},\n}\n
.venv\Lib\site-packages\packaging\licenses\_spdx.py
_spdx.py
Python
48,398
0.85
0.002635
0
vue-tools
680
2025-05-09T13:49:56.303614
BSD-3-Clause
false
313a72cf4425cf31a445d4745d659eb3
#######################################################################################\n#\n# Adapted from:\n# https://github.com/pypa/hatch/blob/5352e44/backend/src/hatchling/licenses/parse.py\n#\n# MIT License\n#\n# Copyright (c) 2017-present Ofek Lev <oss@ofek.dev>\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy of this\n# software and associated documentation files (the "Software"), to deal in the Software\n# without restriction, including without limitation the rights to use, copy, modify,\n# merge, publish, distribute, sublicense, and/or sell copies of the Software, and to\n# permit persons to whom the Software is furnished to do so, subject to the following\n# conditions:\n#\n# The above copyright notice and this permission notice shall be included in all copies\n# or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n# PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF\n# CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE\n# OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n#\n#\n# With additional allowance of arbitrary `LicenseRef-` identifiers, not just\n# `LicenseRef-Public-Domain` and `LicenseRef-Proprietary`.\n#\n#######################################################################################\nfrom __future__ import annotations\n\nimport re\nfrom typing import NewType, cast\n\nfrom packaging.licenses._spdx import EXCEPTIONS, LICENSES\n\n__all__ = [\n "InvalidLicenseExpression",\n "NormalizedLicenseExpression",\n "canonicalize_license_expression",\n]\n\nlicense_ref_allowed = re.compile("^[A-Za-z0-9.-]*$")\n\nNormalizedLicenseExpression = NewType("NormalizedLicenseExpression", str)\n\n\nclass InvalidLicenseExpression(ValueError):\n """Raised when a license-expression string is invalid\n\n >>> canonicalize_license_expression("invalid")\n Traceback (most recent call last):\n ...\n packaging.licenses.InvalidLicenseExpression: Invalid license expression: 'invalid'\n """\n\n\ndef canonicalize_license_expression(\n raw_license_expression: str,\n) -> NormalizedLicenseExpression:\n if not raw_license_expression:\n message = f"Invalid license expression: {raw_license_expression!r}"\n raise InvalidLicenseExpression(message)\n\n # Pad any parentheses so tokenization can be achieved by merely splitting on\n # whitespace.\n license_expression = raw_license_expression.replace("(", " ( ").replace(")", " ) ")\n licenseref_prefix = "LicenseRef-"\n license_refs = {\n ref.lower(): "LicenseRef-" + ref[len(licenseref_prefix) :]\n for ref in license_expression.split()\n if ref.lower().startswith(licenseref_prefix.lower())\n }\n\n # Normalize to lower case so we can look up licenses/exceptions\n # and so boolean operators are Python-compatible.\n license_expression = 
license_expression.lower()\n\n tokens = license_expression.split()\n\n # Rather than implementing boolean logic, we create an expression that Python can\n # parse. Everything that is not involved with the grammar itself is treated as\n # `False` and the expression should evaluate as such.\n python_tokens = []\n for token in tokens:\n if token not in {"or", "and", "with", "(", ")"}:\n python_tokens.append("False")\n elif token == "with":\n python_tokens.append("or")\n elif token == "(" and python_tokens and python_tokens[-1] not in {"or", "and"}:\n message = f"Invalid license expression: {raw_license_expression!r}"\n raise InvalidLicenseExpression(message)\n else:\n python_tokens.append(token)\n\n python_expression = " ".join(python_tokens)\n try:\n invalid = eval(python_expression, globals(), locals())\n except Exception:\n invalid = True\n\n if invalid is not False:\n message = f"Invalid license expression: {raw_license_expression!r}"\n raise InvalidLicenseExpression(message) from None\n\n # Take a final pass to check for unknown licenses/exceptions.\n normalized_tokens = []\n for token in tokens:\n if token in {"or", "and", "with", "(", ")"}:\n normalized_tokens.append(token.upper())\n continue\n\n if normalized_tokens and normalized_tokens[-1] == "WITH":\n if token not in EXCEPTIONS:\n message = f"Unknown license exception: {token!r}"\n raise InvalidLicenseExpression(message)\n\n normalized_tokens.append(EXCEPTIONS[token]["id"])\n else:\n if token.endswith("+"):\n final_token = token[:-1]\n suffix = "+"\n else:\n final_token = token\n suffix = ""\n\n if final_token.startswith("licenseref-"):\n if not license_ref_allowed.match(final_token):\n message = f"Invalid licenseref: {final_token!r}"\n raise InvalidLicenseExpression(message)\n normalized_tokens.append(license_refs[final_token] + suffix)\n else:\n if final_token not in LICENSES:\n message = f"Unknown license: {final_token!r}"\n raise InvalidLicenseExpression(message)\n 
normalized_tokens.append(LICENSES[final_token]["id"] + suffix)\n\n normalized_expression = " ".join(normalized_tokens)\n\n return cast(\n NormalizedLicenseExpression,\n normalized_expression.replace("( ", "(").replace(" )", ")"),\n )\n
.venv\Lib\site-packages\packaging\licenses\__init__.py
__init__.py
Python
5,715
0.95
0.124138
0.317073
python-kit
700
2025-06-10T23:46:10.271687
Apache-2.0
false
846baef42775413a7e03df50cec988e7
\n\n
.venv\Lib\site-packages\packaging\licenses\__pycache__\_spdx.cpython-313.pyc
_spdx.cpython-313.pyc
Other
47,431
0.8
0
0
awesome-app
314
2024-11-13T21:45:41.582772
BSD-3-Clause
false
0ab3dadc622d4b69fa67ded876af0f55